Skip to content

Commit

Permalink
Test passes
Browse files Browse the repository at this point in the history
  • Loading branch information
EmmaRenauld committed Sep 27, 2024
1 parent f1d342a commit 484809b
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 28 deletions.
15 changes: 7 additions & 8 deletions scilpy/connectivity/connectivity.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,6 @@ def multi_proc_compute_connectivity_matrices_from_hdf5(args):
compute_volume, compute_streamline_count, compute_length,
similarity_directory, metrics_data, metrics_names, lesion_data,
include_dps, weighted, min_lesion_vol) = args

print("Multiprocessing, ID {}: computing info for bundle {}."
.format(d.id, comb))
return compute_connectivity_matrices_from_hdf5(
hdf5_filename, labels_img, comb[0], comb[1],
compute_volume, compute_streamline_count, compute_length,
Expand Down Expand Up @@ -79,10 +76,12 @@ def compute_connectivity_matrices_from_hdf5(
length of streamlines in the bundle.
similarity_directory: str
    If not None, a directory used to compute a similarity measure per
    connection (NOTE(review): exact expected contents are undocumented —
    confirm and complete this description).
metrics: Tuple[list[np.ndarray], list[str]]
metrics_data: list[np.ndarray]
List of 3D data with metrics to use, with the list of associated metric
names. If set, the returned dictionary will contain an entry for each
name, with the mean value of each metric.
metrics_names: list[str]
The metrics names.
lesion_data: Tuple[list, np.ndarray]
The (lesion_labels, lesion_data) for lesion load analysis. If set, the
returned dictionary will contain the three entries 'lesion_volume':
Expand All @@ -98,11 +97,11 @@ def compute_connectivity_matrices_from_hdf5(
Returns
-------
final_dict: {(in_label, out_label): (measures_dict, dps_keys)}
A dictionary with the node as key and as value:
measures_dict: The dictionary of returned values.
final_dict: Tuple[dict, list[str]] or None
dict: {(in_label, out_label): measures_dict}
A dictionary with the node as key and, as value, the
dictionary described above.
dps_keys: The list of keys included from dps.
If the connection is not found, None is returned instead.
"""
if len(metrics_data) > 0:
assert len(metrics_data) == len(metrics_names)
Expand Down
30 changes: 10 additions & 20 deletions scripts/scil_connectivity_compute_matrices.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,13 +184,13 @@ def fill_matrix_and_save(measures_dict, labels_list, measure_keys, filenames):
matrix = np.zeros((len(labels_list), len(labels_list), len(measure_keys)))

# Run one loop on node. Fill all matrices at once.
for in_label, out_label in measures_dict:
curr_node_dict = measures_dict[(in_label, out_label)]
for i, key in enumerate(measure_keys):
for label_key, node_values in measures_dict.items():
in_label, out_label = label_key
for i, measure_key in enumerate(measure_keys):
in_pos = labels_list.index(in_label)
out_pos = labels_list.index(out_label)
matrix[in_pos, out_pos, i] = curr_node_dict[key]
matrix[out_pos, in_pos, i] = curr_node_dict[key]
matrix[in_pos, out_pos, i] = node_values[measure_key]
matrix[out_pos, in_pos, i] = node_values[measure_key]

for i, f in enumerate(filenames):
logging.info("Saving resulting {} in file {}"
Expand Down Expand Up @@ -266,12 +266,7 @@ def main():
lesion_data, args.include_dps, args.density_weighting,
args.min_lesion_vol))
else:
def set_num(counter):
d.id = next(counter) + 1

logging.info("PREPARING MULTIPOOLING: {}".format(comb_list))
pool = multiprocessing.Pool(nbr_cpu, initializer=set_num,
initargs=(itertools.count(),))
pool = multiprocessing.Pool(nbr_cpu)

# Dividing the process bundle by bundle
outputs = pool.map(
Expand Down Expand Up @@ -301,8 +296,6 @@ def set_num(counter):

measures_dict_list = [it[0] for it in outputs]
dps_keys = [it[1] for it in outputs]
logging.info("GOT dps {}".format(dps_keys))
logging.info("GOT dicts {}".format(measures_dict_list))

# Verify that all bundles had the same dps_keys
if len(dps_keys) > 1 and not dps_keys[1:] == dps_keys[:-1]:
Expand All @@ -316,23 +309,20 @@ def set_num(counter):
for node in measures_dict_list:
measures_dict.update(node)

logging.info("GOT dps {}".format(dps_keys))
logging.info("GOT dicts {}".format(measures_dict))

# Filling out all the matrices (symmetric) in the order of labels_list
keys = []
filenames = []
if compute_volume:
keys.append(['volume'])
keys.append('volume')
filenames.append(args.volume)
if compute_length:
keys.append(['length'])
keys.append('length')
filenames.append(args.length)
if compute_streamline_count:
keys.append(['streamline_count'])
keys.append('streamline_count')
filenames.append(args.streamline_count)
if similarity_directory is not None:
keys.append(['similarity'])
keys.append('similarity')
filenames.append(args.similarity[1])
if len(args.metrics) > 0:
keys.extend(metrics_names)
Expand Down

0 comments on commit 484809b

Please sign in to comment.