From 668d27f64ddb8b82c3091ebf8b49e1d37006a141 Mon Sep 17 00:00:00 2001
From: arnaudbore
Date: Wed, 13 Dec 2023 16:31:12 -0500
Subject: [PATCH 01/63] fs subject folder can now include session name

---
 scripts/scil_validate_bids.py | 61 ++++++++++++++++++++++-------------
 1 file changed, 38 insertions(+), 23 deletions(-)

diff --git a/scripts/scil_validate_bids.py b/scripts/scil_validate_bids.py
index 6d2a56f68b..94f1a5bf3e 100755
--- a/scripts/scil_validate_bids.py
+++ b/scripts/scil_validate_bids.py
@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-

 """
-Create a json file from a BIDS dataset detailling all info
+Create a json file from a BIDS dataset detailing all info
 needed for tractoflow
 - DWI/rev_DWI
 - T1
@@ -294,7 +294,7 @@ def get_data(layout, nSub, dwis, t1s, fs, default_readout, clean):
     else:
         t1_paths = [curr_t1.path for curr_t1 in t1_nSess]
         logging.warning('More than one T1 file found.'
-                       ' [{}]'.format(','.join(t1_paths)))
+                        ' [{}]'.format(','.join(t1_paths)))

     return {'subject': nSub,
             'session': nSess,
@@ -409,7 +409,7 @@ def associate_dwis(layout, nSub):
                 if rev_curr_entity[direction] == rev_dwi.entities[direction]:
                     # Print difference between entities
                     logging.warning('DWIs {} and {} have opposite phase encoding directions but different entities.'
-                                   'Please check their respective json files.'.format(curr_dwi, rev_dwi))
+                                    'Please check their respective json files.'.format(curr_dwi, rev_dwi))

     # drop all rev_dwi used
     logging.info('Checking dwi {}'.format(all_dwis[0]))
@@ -458,28 +458,43 @@ def main():
        logging.info("-" * len(mess))
        logging.info(mess)
        dwis = associate_dwis(layout, nSub)
-       fs_inputs = []
-       t1s = []
-
-       if args.fs:
-           abs_fs = os.path.abspath(args.fs)
-           logging.info("Looking for FS files")
-           t1_fs = glob(os.path.join(abs_fs, 'sub-' + nSub, 'mri/T1.mgz'))
-           wmparc = glob(os.path.join(abs_fs, 'sub-' + nSub, 'mri/wmparc.mgz'))
-           aparc_aseg = glob(os.path.join(abs_fs, 'sub-' + nSub,
-                                          'mri/aparc+aseg.mgz'))
-           if len(t1_fs) == 1 and len(wmparc) == 1 and len(aparc_aseg) == 1:
-               fs_inputs = [t1_fs[0], wmparc[0], aparc_aseg[0]]
-               logging.info("Found FS files")
-       else:
-           logging.info("Looking for T1 files")
-           t1s = layout.get(subject=nSub,
-                            datatype='anat', extension='nii.gz',
-                            suffix='T1w')
-           if t1s:
-               logging.info("Found {} T1 files".format(len(t1s)))

+       # Get the data for each run of DWIs
        for dwi in dwis:
+           fs_inputs = []
+           t1s = []
+           if args.fs:
+               abs_fs = os.path.abspath(args.fs)
+
+               logging.info("Looking for FS files")
+               test_fs_sub_path = os.path.join(abs_fs, 'sub-' + nSub)
+               fs_sub_path = ""
+               if os.path.exists(test_fs_sub_path):
+                   fs_sub_path = test_fs_sub_path
+               elif 'session' in dwi[0].entities:
+                   nSess = dwi[0].entities['session']
+                   test_fs_sub_path = os.path.join(abs_fs, 'sub-' + nSub + '_ses-' + nSess)
+                   if os.path.exists(test_fs_sub_path):
+                       fs_sub_path = test_fs_sub_path
+
+               if fs_sub_path:
+                   t1_fs = glob(os.path.join(fs_sub_path, 'mri/T1.mgz'))
+                   wmparc = glob(os.path.join(fs_sub_path, 'mri/wmparc.mgz'))
+                   aparc_aseg = glob(os.path.join(fs_sub_path, 'mri/aparc+aseg.mgz'))
+
+                   if len(t1_fs) == 1 and len(wmparc) == 1 and len(aparc_aseg) == 1:
+                       fs_inputs = [t1_fs[0], wmparc[0], aparc_aseg[0]]
+                       logging.info("Found FS files")
+                   else:
+                       logging.info("FS files not found")
+               else:
+                   logging.info("Looking for T1 files")
+                   t1s = layout.get(subject=nSub,
+                                    datatype='anat', extension='nii.gz',
+                                    suffix='T1w')
+                   if t1s:
+                       logging.info("Found {} T1 files".format(len(t1s)))
+
            data.append(get_data(layout, nSub, dwi,

From 91758057a8acf9b0a7e10269c8736a218880a9a1 Mon Sep 17 00:00:00 2001
From: arnaudbore Date: Thu, 14 Dec 2023 11:09:31 -0500 Subject: [PATCH 02/63] rename scil_validate_bids --- scripts/legacy/scil_validate_bids.py | 21 +++++++++++++++++++ ...validate_bids.py => scil_bids_validate.py} | 0 ...validate_bids.py => test_bids_validate.py} | 10 ++++----- 3 files changed, 26 insertions(+), 5 deletions(-) create mode 100644 scripts/legacy/scil_validate_bids.py rename scripts/{scil_validate_bids.py => scil_bids_validate.py} (100%) rename scripts/tests/{test_validate_bids.py => test_bids_validate.py} (98%) diff --git a/scripts/legacy/scil_validate_bids.py b/scripts/legacy/scil_validate_bids.py new file mode 100644 index 0000000000..2b833d3920 --- /dev/null +++ b/scripts/legacy/scil_validate_bids.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_bids_validate import main as new_main + + +DEPRECATION_MSG = """ +This script has been renamed scil_bids_validate.py. +Please change your existing pipelines accordingly. +""" + + +@deprecate_script("scil_validate_bids.py", DEPRECATION_MSG, + '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() diff --git a/scripts/scil_validate_bids.py b/scripts/scil_bids_validate.py similarity index 100% rename from scripts/scil_validate_bids.py rename to scripts/scil_bids_validate.py diff --git a/scripts/tests/test_validate_bids.py b/scripts/tests/test_bids_validate.py similarity index 98% rename from scripts/tests/test_validate_bids.py rename to scripts/tests/test_bids_validate.py index 448cf138f3..2da561d122 100644 --- a/scripts/tests/test_validate_bids.py +++ b/scripts/tests/test_bids_validate.py @@ -325,7 +325,7 @@ def compare_jsons(json_output, test_dir): def test_help_option(script_runner): - ret = script_runner.run('scil_validate_bids.py', '--help') + ret = script_runner.run('scil_bids_validate.py', '--help') assert ret.success @@ -344,7 +344,7 @@ def test_bids_epi(tmpdir, script_runner, dwi_is_complex, json_output): complex_dwi=dwi_is_complex) ret = script_runner.run( - 'scil_validate_bids.py', + 'scil_bids_validate.py', test_dir, os.path.join(test_dir, json_output), '-f', '-v') @@ -371,7 +371,7 @@ def test_bids_sbref( complex_sbref=sbref_is_complex) ret = script_runner.run( - 'scil_validate_bids.py', + 'scil_bids_validate.py', test_dir, os.path.join(test_dir, json_output), '-f', '-v') @@ -397,7 +397,7 @@ def test_bids_rev_dwi( complex_rev_dwi=rev_is_complex) ret = script_runner.run( - 'scil_validate_bids.py', + 'scil_bids_validate.py', test_dir, os.path.join(test_dir, json_output), '-f', '-v') @@ -425,7 +425,7 @@ def test_bids_rev_dwi_sbref( complex_rev_dwi=rev_is_complex) ret = script_runner.run( - 'scil_validate_bids.py', + 'scil_bids_validate.py', test_dir, os.path.join(test_dir, json_output), '-f', '-v') From daa1df8ae2d030249fd74f9bc951b75f399df2a1 Mon Sep 17 00:00:00 2001 From: CHrlS98 Date: Thu, 14 Dec 2023 11:38:57 -0500 Subject: [PATCH 03/63] Merge asym filtering scripts --- scilpy/denoise/asym_averaging.py | 123 ---------- ...lateral_filtering.py => asym_filtering.py} | 224 +++++++++++------- scilpy/denoise/tests/test_asym_filtering.py | 46 ++++ .../denoise/tests/test_bilateral_filtering.py | 52 ---- scilpy/io/utils.py | 2 +- scilpy/tests/arrays.py | 130 +++++++++- scripts/scil_sh_to_aodf.py | 155 ++++++++++++ .../test_execute_asymmetric_filtering.py | 52 ---- ...ateral_filtering.py => test_sh_to_aodf.py} | 45 ++-- 9 files changed, 496 insertions(+), 333 deletions(-) delete mode 100644 
scilpy/denoise/asym_averaging.py rename scilpy/denoise/{bilateral_filtering.py => asym_filtering.py} (73%) create mode 100644 scilpy/denoise/tests/test_asym_filtering.py delete mode 100644 scilpy/denoise/tests/test_bilateral_filtering.py create mode 100644 scripts/scil_sh_to_aodf.py delete mode 100644 scripts/tests/test_execute_asymmetric_filtering.py rename scripts/tests/{test_execute_angle_aware_bilateral_filtering.py => test_sh_to_aodf.py} (73%) diff --git a/scilpy/denoise/asym_averaging.py b/scilpy/denoise/asym_averaging.py deleted file mode 100644 index 2c0bfeacf4..0000000000 --- a/scilpy/denoise/asym_averaging.py +++ /dev/null @@ -1,123 +0,0 @@ -# -*- coding: utf-8 -*- - -import numpy as np -from dipy.reconst.shm import sh_to_sf_matrix -from dipy.data import get_sphere -from dipy.core.sphere import Sphere -from scipy.ndimage import correlate - - -def local_asym_filtering(in_sh, sh_order=8, sh_basis='descoteaux07', - in_full_basis=False, dot_sharpness=1.0, - sphere_str='repulsion724', sigma=1.0): - """Average the SH projected on a sphere using a first-neighbor gaussian - blur and a dot product weight between sphere directions and the direction - to neighborhood voxels, forcing to 0 negative values and thus performing - asymmetric hemisphere-aware filtering. - - Parameters - ---------- - in_sh: ndarray (x, y, z, n_coeffs) - Input SH coefficients array - sh_order: int, optional - Maximum order of the SH series. - sh_basis: {'descoteaux07', 'tournier07'}, optional - SH basis of the input signal. - in_full_basis: bool, optional - True if the input is in full SH basis. - dot_sharpness: float, optional - Exponent of the dot product. When set to 0.0, directions - are not weighted by the dot product. - sphere_str: str, optional - Name of the sphere used to project SH coefficients to SF. - sigma: float, optional - Sigma for the Gaussian. - - Returns - ------- - out_sh: ndarray (x, y, z, n_coeffs) - Filtered signal as SH coefficients in full SH basis. - """ - # Load the sphere used for projection of SH - sphere = get_sphere(sphere_str) - - # Normalized filter for each sf direction - weights = _get_weights(sphere, dot_sharpness, sigma) - - nb_sf = len(sphere.vertices) - mean_sf = np.zeros(np.append(in_sh.shape[:-1], nb_sf)) - B = sh_to_sf_matrix(sphere, sh_order=sh_order, basis_type=sh_basis, - return_inv=False, full_basis=in_full_basis) - - # We want a B matrix to project on an inverse sphere to have the sf on - # the opposite hemisphere for a given vertice - neg_B = sh_to_sf_matrix(Sphere(xyz=-sphere.vertices), sh_order=sh_order, - basis_type=sh_basis, return_inv=False, - full_basis=in_full_basis) - - # Apply filter to each sphere vertice - for sf_i in range(nb_sf): - w_filter = weights[..., sf_i] - - # Calculate contribution of center voxel - current_sf = np.dot(in_sh, B[:, sf_i]) - mean_sf[..., sf_i] = w_filter[1, 1, 1] * current_sf - - # Add contributions of neighbors using opposite hemispheres - current_sf = np.dot(in_sh, neg_B[:, sf_i]) - w_filter[1, 1, 1] = 0.0 - mean_sf[..., sf_i] += correlate(current_sf, w_filter, mode="constant") - - # Convert back to SH coefficients - _, B_inv = sh_to_sf_matrix(sphere, sh_order=sh_order, - basis_type=sh_basis, - full_basis=True) - - out_sh = np.array([np.dot(i, B_inv) for i in mean_sf], dtype=in_sh.dtype) - return out_sh - - -def _get_weights(sphere, dot_sharpness, sigma): - """ - Get neighbors weight in respect to the direction to a voxel. - - Parameters - ---------- - sphere: Sphere - Sphere used for SF reconstruction. 
- dot_sharpness: float - Dot product exponent. - sigma: float - Variance of the gaussian used for weighting neighbors. - - Returns - ------- - weights: dictionary - Vertices weights with respect to voxel directions. - """ - directions = np.zeros((3, 3, 3, 3)) - for x in range(3): - for y in range(3): - for z in range(3): - directions[x, y, z, 0] = x - 1 - directions[x, y, z, 1] = y - 1 - directions[x, y, z, 2] = z - 1 - - non_zero_dir = np.ones((3, 3, 3), dtype=bool) - non_zero_dir[1, 1, 1] = False - - # normalize dir - dir_norm = np.linalg.norm(directions, axis=-1, keepdims=True) - directions[non_zero_dir] /= dir_norm[non_zero_dir] - - g_weights = np.exp(-dir_norm**2 / (2 * sigma**2)) - d_weights = np.dot(directions, sphere.vertices.T) - - d_weights = np.where(d_weights > 0.0, d_weights**dot_sharpness, 0.0) - weights = d_weights * g_weights - weights[1, 1, 1, :] = 1.0 - - # Normalize filters so that all sphere directions weights sum to 1 - weights /= weights.reshape((-1, weights.shape[-1])).sum(axis=0) - - return weights diff --git a/scilpy/denoise/bilateral_filtering.py b/scilpy/denoise/asym_filtering.py similarity index 73% rename from scilpy/denoise/bilateral_filtering.py rename to scilpy/denoise/asym_filtering.py index 6fe32c6585..bb185290f4 100644 --- a/scilpy/denoise/bilateral_filtering.py +++ b/scilpy/denoise/asym_filtering.py @@ -1,22 +1,19 @@ # -*- coding: utf-8 -*- import numpy as np -import multiprocessing -import itertools from dipy.reconst.shm import sh_to_sf_matrix from dipy.data import get_sphere -from scilpy.gpuparallel.opencl_utils import (have_opencl, CLKernel, CLManager) +from dipy.core.sphere import Sphere +from scipy.ndimage import correlate +from scilpy.gpuparallel.opencl_utils import have_opencl, CLKernel, CLManager def angle_aware_bilateral_filtering(in_sh, sh_order=8, sh_basis='descoteaux07', in_full_basis=False, sphere_str='repulsion724', - sigma_spatial=1.0, - sigma_angular=1.0, - sigma_range=0.5, - use_gpu=True, - nbr_processes=1): + sigma_spatial=1.0, sigma_angular=1.0, + sigma_range=0.5, use_gpu=True): """ Angle-aware bilateral filtering. @@ -40,8 +37,6 @@ def angle_aware_bilateral_filtering(in_sh, sh_order=8, Standard deviation for range filter. use_gpu: bool, optional True if GPU should be used. - nbr_processes: int, optional - Number of processes to use. Returns ------- @@ -60,8 +55,7 @@ def angle_aware_bilateral_filtering(in_sh, sh_order=8, return angle_aware_bilateral_filtering_cpu(in_sh, sh_order, sh_basis, in_full_basis, sphere_str, sigma_spatial, - sigma_angular, sigma_range, - nbr_processes) + sigma_angular, sigma_range) def angle_aware_bilateral_filtering_gpu(in_sh, sh_order=8, @@ -159,8 +153,7 @@ def angle_aware_bilateral_filtering_cpu(in_sh, sh_order=8, sphere_str='repulsion724', sigma_spatial=1.0, sigma_angular=1.0, - sigma_range=0.5, - nbr_processes=1): + sigma_range=0.5): """ Angle-aware bilateral filtering on the CPU (optionally using multiple threads). @@ -183,8 +176,6 @@ def angle_aware_bilateral_filtering_cpu(in_sh, sh_order=8, Standard deviation for angular filter. sigma_range: float, optional Standard deviation for range filter. - nbr_processes: int, optional - Number of processes to use. 
Returns ------- @@ -205,32 +196,16 @@ def angle_aware_bilateral_filtering_cpu(in_sh, sh_order=8, B = sh_to_sf_matrix(sphere, sh_order=sh_order, basis_type=sh_basis, return_inv=False, full_basis=in_full_basis) - if nbr_processes > 1: - # Apply filter to each sphere vertice in parallel - pool = multiprocessing.Pool(nbr_processes) - - # divide the sphere directions among the processes - base_chunk_size = int(nb_sf / nbr_processes + 0.5) - first_ids = np.arange(0, nb_sf, base_chunk_size) - residuals = nb_sf - first_ids - chunk_sizes = np.where(residuals < base_chunk_size, - residuals, base_chunk_size) - res = pool.map(_process_subset_directions, - zip(itertools.repeat(weights), - itertools.repeat(in_sh), - first_ids, - chunk_sizes, - itertools.repeat(B), - itertools.repeat(sigma_range))) - pool.close() - pool.join() - - # Patch chunks together. - mean_sf = np.concatenate(res, axis=-1) - else: - args = [weights, in_sh, 0, nb_sf, - B, sigma_range] - mean_sf = _process_subset_directions(args) + mean_sf = np.zeros(in_sh.shape[:-1] + (nb_sf,)) + + # Apply filter to each sphere vertice + for sph_id in range(nb_sf): + w_filter = weights[..., sph_id] + + # Generate 1-channel images for directions u and -u + current_sf = np.dot(in_sh, B[:, sph_id]) + mean_sf[..., sph_id] = _correlate_spatial(current_sf, w_filter, + sigma_range) # Convert back to SH coefficients _, B_inv = sh_to_sf_matrix(sphere, sh_order=sh_order, basis_type=sh_basis, @@ -352,54 +327,6 @@ def _get_angular_weights(shape, sphere, sigma_angular): return angular_weights -def _process_subset_directions(args): - """ - Filter a subset of all sphere directions. - - Parameters - ---------- - args: List - args[0]: weights, ndarray - Filter weights per direction. - args[1]: in_sh, ndarray - Input SH coefficients array. - args[2]: first_dir_id, int - ID of first sphere direction. - args[3]: chunk_size, int - Number of sphere directions in chunk. - args[4]: B, ndarray - SH to SF matrix for current sphere directions. - args[5]: neg_B, ndarray - SH to SF matrix for opposite sphere directions. - args[6]: sigma_range, int - Sigma of the Gaussian use for range filtering. - - Returns - ------- - out_sf: ndarray - SF array for subset directions. - """ - weights = args[0] - in_sh = args[1] - first_dir_id = args[2] - chunk_size = args[3] - B = args[4] - sigma_range = args[5] - - out_sf = np.zeros(in_sh.shape[:-1] + (chunk_size,)) - # Apply filter to each sphere vertice - for offset_i in range(chunk_size): - sph_id = first_dir_id + offset_i - w_filter = weights[..., sph_id] - - # Generate 1-channel images for directions u and -u - current_sf = np.dot(in_sh, B[:, sph_id]) - out_sf[..., offset_i] = _correlate_spatial(current_sf, - w_filter, - sigma_range) - return out_sf - - def _correlate_spatial(image_u, h_filter, sigma_range): """ Implementation of the correlation operation for anisotropic filtering. @@ -441,3 +368,120 @@ def _correlate_spatial(image_u, h_filter, sigma_range): out_im[ii, jj, kk] /= np.sum(res_filter) return out_im + + +def cosine_filtering(in_sh, sh_order=8, sh_basis='descoteaux07', + in_full_basis=False, dot_sharpness=1.0, + sphere_str='repulsion724', sigma=1.0): + """ + Average the SH projected on a sphere using a first-neighbor gaussian + blur and a dot product weight between sphere directions and the direction + to neighborhood voxels, forcing to 0 negative values and thus performing + asymmetric hemisphere-aware filtering. 
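+
+    A minimal usage sketch (illustrative values only, not data from this PR;
+    the zero-filled input is a placeholder):
+
+        >>> import numpy as np
+        >>> sh = np.zeros((3, 3, 3, 45))  # order-8, symmetric SH volume
+        >>> out = cosine_filtering(sh, sh_order=8, sh_basis='descoteaux07',
+        ...                        in_full_basis=False, dot_sharpness=1.0,
+        ...                        sphere_str='repulsion724', sigma=1.0)
+        >>> out.shape  # output always uses the full SH basis: (8 + 1)**2
+        (3, 3, 3, 81)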
+
+    Parameters
+    ----------
+    in_sh: ndarray (x, y, z, n_coeffs)
+        Input SH coefficients array
+    sh_order: int, optional
+        Maximum order of the SH series.
+    sh_basis: {'descoteaux07', 'tournier07'}, optional
+        SH basis of the input signal.
+    in_full_basis: bool, optional
+        True if the input is in full SH basis.
+    dot_sharpness: float, optional
+        Exponent of the dot product. When set to 0.0, directions
+        are not weighted by the dot product.
+    sphere_str: str, optional
+        Name of the sphere used to project SH coefficients to SF.
+    sigma: float, optional
+        Sigma for the Gaussian.
+
+    Returns
+    -------
+    out_sh: ndarray (x, y, z, n_coeffs)
+        Filtered signal as SH coefficients in full SH basis.
+    """
+    # Load the sphere used for projection of SH
+    sphere = get_sphere(sphere_str)
+
+    # Normalized filter for each sf direction
+    weights = _get_weights(sphere, dot_sharpness, sigma)
+
+    nb_sf = len(sphere.vertices)
+    mean_sf = np.zeros(np.append(in_sh.shape[:-1], nb_sf))
+    B = sh_to_sf_matrix(sphere, sh_order=sh_order, basis_type=sh_basis,
+                        return_inv=False, full_basis=in_full_basis)
+
+    # We want a B matrix to project on an inverse sphere to have the sf on
+    # the opposite hemisphere for a given vertex
+    neg_B = sh_to_sf_matrix(Sphere(xyz=-sphere.vertices), sh_order=sh_order,
+                            basis_type=sh_basis, return_inv=False,
+                            full_basis=in_full_basis)
+
+    # Apply the filter to each sphere vertex
+    for sf_i in range(nb_sf):
+        w_filter = weights[..., sf_i]
+
+        # Calculate contribution of center voxel
+        current_sf = np.dot(in_sh, B[:, sf_i])
+        mean_sf[..., sf_i] = w_filter[1, 1, 1] * current_sf
+
+        # Add contributions of neighbors using opposite hemispheres
+        current_sf = np.dot(in_sh, neg_B[:, sf_i])
+        w_filter[1, 1, 1] = 0.0
+        mean_sf[..., sf_i] += correlate(current_sf, w_filter, mode="constant")
+
+    # Convert back to SH coefficients
+    _, B_inv = sh_to_sf_matrix(sphere, sh_order=sh_order,
+                               basis_type=sh_basis,
+                               full_basis=True)
+
+    out_sh = np.array([np.dot(i, B_inv) for i in mean_sf], dtype=in_sh.dtype)
+    return out_sh
+
+
+def _get_weights(sphere, dot_sharpness, sigma):
+    """
+    Get neighbor weights with respect to the direction to a voxel.
+
+    Parameters
+    ----------
+    sphere: Sphere
+        Sphere used for SF reconstruction.
+    dot_sharpness: float
+        Dot product exponent.
+    sigma: float
+        Variance of the gaussian used for weighting neighbors.
+
+    Returns
+    -------
+    weights: ndarray
+        Vertices weights with respect to voxel directions.
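+
+    Notes
+    -----
+    For a sphere with N vertices, the returned array has shape (3, 3, 3, N),
+    and weights[..., i] sums to 1 over the 3x3x3 neighborhood for each
+    sphere direction i (see the normalization step at the end of the body).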
+ """ + directions = np.zeros((3, 3, 3, 3)) + for x in range(3): + for y in range(3): + for z in range(3): + directions[x, y, z, 0] = x - 1 + directions[x, y, z, 1] = y - 1 + directions[x, y, z, 2] = z - 1 + + non_zero_dir = np.ones((3, 3, 3), dtype=bool) + non_zero_dir[1, 1, 1] = False + + # normalize dir + dir_norm = np.linalg.norm(directions, axis=-1, keepdims=True) + directions[non_zero_dir] /= dir_norm[non_zero_dir] + + g_weights = np.exp(-dir_norm**2 / (2 * sigma**2)) + d_weights = np.dot(directions, sphere.vertices.T) + + d_weights = np.where(d_weights > 0.0, d_weights**dot_sharpness, 0.0) + weights = d_weights * g_weights + weights[1, 1, 1, :] = 1.0 + + # Normalize filters so that all sphere directions weights sum to 1 + weights /= weights.reshape((-1, weights.shape[-1])).sum(axis=0) + + return weights diff --git a/scilpy/denoise/tests/test_asym_filtering.py b/scilpy/denoise/tests/test_asym_filtering.py new file mode 100644 index 0000000000..7dd066cecf --- /dev/null +++ b/scilpy/denoise/tests/test_asym_filtering.py @@ -0,0 +1,46 @@ +import numpy as np + +from scilpy.denoise.asym_filtering import \ + angle_aware_bilateral_filtering_cpu, cosine_filtering +from scilpy.reconst.utils import get_sh_order_and_fullness +from scilpy.tests.arrays import ( + fodf_3x3_order8_descoteaux07, + fodf_3x3_order8_descoteaux07_filtered_bilateral, + fodf_3x3_order8_descoteaux07_filtered_cosine) + + +def test_angle_aware_bilateral_filtering(): + """ + Test angle_aware_bilateral_filtering_cpu on a simple 3x3 grid. + """ + in_sh = fodf_3x3_order8_descoteaux07 + sh_basis = 'descoteaux07' + sphere_str = 'repulsion100' + sigma_spatial = 1.0 + sigma_angular = 1.0 + sigma_range = 1.0 + + sh_order, full_basis = get_sh_order_and_fullness(in_sh.shape[-1]) + out = angle_aware_bilateral_filtering_cpu(in_sh, sh_order, + sh_basis, full_basis, + sphere_str, sigma_spatial, + sigma_angular, sigma_range) + + assert np.allclose(out, fodf_3x3_order8_descoteaux07_filtered_bilateral) + + +def test_cosine_filtering(): + """ + Test cosine filtering on a simple 3x3 grid. + """ + in_sh = fodf_3x3_order8_descoteaux07 + sh_basis = 'descoteaux07' + sphere_str = 'repulsion100' + sigma_spatial = 1.0 + sharpness = 1.0 + + sh_order, full_basis = get_sh_order_and_fullness(in_sh.shape[-1]) + out = cosine_filtering(in_sh, sh_order, sh_basis, full_basis, + sharpness, sphere_str, sigma_spatial) + + assert np.allclose(out, fodf_3x3_order8_descoteaux07_filtered_cosine) \ No newline at end of file diff --git a/scilpy/denoise/tests/test_bilateral_filtering.py b/scilpy/denoise/tests/test_bilateral_filtering.py deleted file mode 100644 index 58c73e696d..0000000000 --- a/scilpy/denoise/tests/test_bilateral_filtering.py +++ /dev/null @@ -1,52 +0,0 @@ -import numpy as np - -from scilpy.denoise.bilateral_filtering import \ - angle_aware_bilateral_filtering_cpu -from scilpy.reconst.utils import get_sh_order_and_fullness -from scilpy.tests.arrays import ( - fodf_3x3_order8_descoteaux07, fodf_3x3_order8_descoteaux07_filtered) - - -def _call_angle_aware_bilateral_filtering_cpu_n_processes(n_processes): - """ Call angle_aware_bilateral_filtering_cpu on a simple 3x3 grid - using an arbitrary number of processes. 
- """ - - in_sh = fodf_3x3_order8_descoteaux07 - sh_order = 8 - sh_basis = 'descoteaux07' - in_full_basis = False - sphere_str = 'repulsion100' - sigma_spatial = 1.0 - sigma_angular = 1.0 - sigma_range = 1.0 - nbr_processes = n_processes - - sh_order, full_basis = get_sh_order_and_fullness(in_sh.shape[-1]) - out = angle_aware_bilateral_filtering_cpu(in_sh, sh_order, - sh_basis, in_full_basis, - sphere_str, sigma_spatial, - sigma_angular, sigma_range, - nbr_processes) - - return out - - -def test_angle_aware_bilateral_filtering_cpu_one_process(): - """ Test angle_aware_bilateral_filtering_cpu on a simple 3x3 grid - using one process. - """ - - out = _call_angle_aware_bilateral_filtering_cpu_n_processes(1) - - assert np.allclose(out, fodf_3x3_order8_descoteaux07_filtered) - - -def test_angle_aware_bilateral_filtering_cpu_four_processes(): - """ Test angle_aware_bilateral_filtering_cpu on a simple 3x3 grid - using four processes. - """ - - out = _call_angle_aware_bilateral_filtering_cpu_n_processes(4) - - assert np.allclose(out, fodf_3x3_order8_descoteaux07_filtered) diff --git a/scilpy/io/utils.py b/scilpy/io/utils.py index 90c8aa6165..f6d3f0610e 100644 --- a/scilpy/io/utils.py +++ b/scilpy/io/utils.py @@ -254,7 +254,7 @@ def add_sh_basis_args(parser, mandatory=False): choices = ['descoteaux07', 'tournier07'] def_val = 'descoteaux07' help_msg = 'Spherical harmonics basis used for the SH coefficients. ' +\ - '\nMustbe either \'descoteaux07\' or \'tournier07\'' +\ + '\nMust be either \'descoteaux07\' or \'tournier07\'' +\ ' [%(default)s]:\n' +\ ' \'descoteaux07\': SH basis from the Descoteaux et al.\n' +\ ' MRM 2007 paper\n' +\ diff --git a/scilpy/tests/arrays.py b/scilpy/tests/arrays.py index 8def308349..b40826db30 100644 --- a/scilpy/tests/arrays.py +++ b/scilpy/tests/arrays.py @@ -78,7 +78,7 @@ 0., 0., 0., 0., 0., 0., 0., 0.]]]]) -fodf_3x3_order8_descoteaux07_filtered = np.asarray([[[[ +fodf_3x3_order8_descoteaux07_filtered_bilateral = np.asarray([[[[ 5.99904145e-02, -7.74841521e-03, -8.85743552e-04, -2.43733904e-02, -6.71300622e-02, 1.40698629e-03, -5.02532006e-02, -1.31764043e-03, 4.39820686e-02, @@ -243,6 +243,134 @@ 2.45072224e-04, 2.64311350e-04, -1.81628898e-04, 6.73057957e-04]]]]) + +fodf_3x3_order8_descoteaux07_filtered_cosine = np.array([[ + [[ 1.10403600e-01, -1.56274168e-02, -1.76220261e-03, -4.06611449e-02, + -1.30363567e-01, 3.75744696e-03, -9.97133892e-02, -3.49429274e-03, + 9.52583150e-02, 8.20238612e-03, -4.89889718e-03, 2.97695306e-03, + 4.70504991e-03, 2.74152194e-02, -1.06442787e-03, 3.09297306e-02, + 3.78406501e-02, 4.40385732e-03, 6.97472214e-02, -5.22356452e-03, + 5.54510683e-02, 4.19701656e-03, -4.61628754e-02, 1.04670478e-03, + -9.23817078e-02, -1.08910148e-02, 5.21920978e-04, -6.08726863e-03, + 5.44869381e-03, -2.53482527e-04, -2.27573225e-03, -1.40288007e-02, + 1.35667761e-03, -1.30331232e-02, 5.35207622e-04, -1.63039610e-02, + 7.97932344e-03, 8.02639457e-04, -1.21136261e-02, -1.83430175e-03, + -2.54608366e-02, 9.91489623e-04, -1.97747572e-02, -1.75031223e-03, + 1.49128199e-02, -1.43106403e-03, 2.66094509e-02, 8.57021815e-04, + 3.30743534e-02]], + + [[ 1.40072235e-01, 3.36392275e-03, -1.12255448e-04, -5.78225308e-03, + -1.77192188e-01, -6.19144118e-03, -1.17119585e-01, -1.16353192e-02, + 3.66205780e-02, -3.46907611e-02, -3.57179047e-03, -9.03952453e-03, + 3.77892854e-03, 7.63422898e-03, 1.62939211e-03, 1.45448683e-02, + 9.98680216e-02, -2.60934868e-03, 1.02401140e-01, -3.99776516e-03, + 6.98372955e-02, 8.57505830e-03, -1.55541774e-02, -6.67052505e-04, + 
-3.87320367e-02, 2.70807684e-02, 4.61517902e-04, 1.48157281e-02, + 2.48281726e-03, 3.77428961e-03, -3.20783604e-03, -5.32528133e-03, + -4.66592653e-03, -4.11151389e-03, 1.53491240e-04, -6.28303818e-03, + -2.60040931e-02, 9.76421739e-04, -3.20749919e-02, 6.17639701e-04, + -3.80348770e-02, 3.50055436e-03, -2.89460088e-02, -6.21961773e-03, + 6.33647814e-03, -3.64798932e-03, 1.61175330e-02, -8.68729069e-03, + 1.87904410e-02]], + + [[ 1.14020750e-01, -6.87045764e-03, -1.14232450e-03, 4.11661189e-02, + -1.63423278e-01, 1.42638757e-03, -1.02391114e-01, -3.10274637e-03, + -4.14102976e-02, -7.65200269e-05, -1.22479669e-03, 1.44271152e-04, + 1.02909411e-03, -2.53524591e-02, 8.46680097e-04, -2.91525407e-02, + 8.15595775e-02, 2.62140564e-03, 8.88886643e-02, 3.47353540e-03, + 6.60543440e-02, 4.57053464e-03, 2.22276194e-02, 1.59968571e-03, + 4.74753501e-02, 9.57052289e-03, -2.72297788e-03, 7.60147678e-04, + -2.24030127e-03, -2.25223735e-03, -1.58726069e-03, 1.44355725e-02, + -2.65302865e-03, 1.43794907e-02, -7.26650570e-04, 1.72575124e-02, + -2.26407370e-02, -3.96339977e-05, -2.79391904e-02, 1.96495129e-04, + -3.42417083e-02, 5.69135891e-04, -2.83188289e-02, -3.90851596e-03, + -9.86179763e-03, -1.53911048e-03, -1.44494506e-02, -3.56561464e-03, + -2.28537982e-02]]], + + [[[ 7.26406236e-02, 7.02981930e-03, -1.87881430e-03, -1.61256813e-02, + -2.64719814e-02, 1.79623468e-02, -2.81063801e-02, 6.54603124e-03, + 1.45408540e-02, -1.01358115e-02, 4.89154209e-04, -2.90726497e-03, + -5.52737463e-04, 8.21197934e-03, -1.38576779e-03, 1.91516929e-02, + 1.90580115e-02, 1.39116265e-03, 3.21379718e-02, -3.41384306e-02, + 1.56080311e-02, 2.17425083e-03, -1.61882394e-02, 2.30982768e-03, + -4.68867105e-03, -6.71668742e-04, -1.23223614e-04, 2.34797057e-03, + 1.21306862e-03, 1.70700484e-03, 2.32198084e-03, -5.41906023e-03, + 4.33958103e-03, -8.72831333e-03, 4.65559442e-04, -1.77434743e-02, + -3.67747484e-03, 2.22363618e-03, -6.92884020e-03, 1.54291482e-03, + -1.17597007e-02, -7.95446846e-04, -9.29108890e-05, 6.00740874e-03, + -8.13945278e-04, 6.46410176e-04, 3.25235814e-03, 5.35093467e-03, + 1.94505156e-03]], + + [[ 9.81325940e-02, 1.52165204e-02, 1.24419995e-03, -1.28939516e-03, + -7.51292526e-02, -4.42181788e-03, -6.18455506e-02, 1.09682359e-02, + 1.47464674e-03, -3.31177366e-02, -2.92832122e-03, -1.02011317e-02, + 3.94651358e-04, 2.74508980e-03, 3.55769102e-03, 4.53198680e-03, + 1.65537934e-02, 6.31128591e-04, 3.60410320e-02, -2.02268925e-02, + 3.71610204e-02, 1.50104427e-02, -2.10404071e-02, -6.66322009e-03, + -2.82880695e-02, 2.37099397e-02, 4.67322046e-04, 1.69550184e-02, + 7.12799550e-04, 5.70549692e-03, 8.82880626e-04, -3.97702624e-03, + -4.87365758e-03, 1.15246875e-03, -2.71843590e-03, 1.70523580e-03, + 8.22004027e-03, -2.16495266e-03, -1.35291214e-02, 1.12046159e-02, + -1.70833051e-02, 9.56991767e-03, -2.20789814e-02, -5.21140434e-03, + -5.63453210e-03, 9.14118089e-03, 1.07848139e-02, -2.44076209e-03, + 7.20372163e-03]], + + [[ 7.82262155e-02, 4.89325589e-03, 9.69035921e-04, 2.48817773e-02, + -5.77909247e-02, 5.15396685e-03, -3.73859556e-02, -1.12652597e-02, + 1.46133523e-03, -1.93929247e-02, -1.66098338e-03, -5.40552612e-03, + 1.90785108e-03, -1.27736588e-02, -2.63140561e-04, -1.64589849e-02, + -1.67926849e-02, 1.37226384e-02, 3.40286811e-02, 1.60456988e-02, + 3.66665622e-02, -4.95865979e-03, -8.81294607e-04, -9.27511404e-04, + -5.56159406e-03, 1.85807717e-02, 4.28397297e-05, 1.14440177e-02, + -8.44202079e-04, 2.83058074e-03, -2.77968111e-03, 7.24897169e-03, + -3.92705015e-03, 8.88560041e-03, -1.14740758e-03, 
1.25959122e-02, + 2.28839936e-02, -6.30383042e-04, 6.79235888e-03, 2.74534443e-03, + -1.08549693e-02, 2.89646379e-03, -1.59780244e-02, -7.49181391e-03, + -9.15354118e-03, -1.60175812e-03, -1.81015916e-03, -7.74073756e-04, + 5.32445729e-03]]], + + [[[ 1.34755663e-02, 1.28413970e-02, 1.49938841e-03, -6.47475638e-03, + 2.52029988e-03, 4.88968824e-03, -5.42289754e-03, 1.60941684e-03, + -2.21852068e-03, 2.07330837e-03, 4.86536226e-03, 1.66978774e-04, + -6.71214109e-03, 1.27351141e-03, 9.82399935e-04, 2.22677827e-03, + 2.71719965e-03, 2.06634356e-03, 2.94252961e-03, -8.67336125e-03, + 3.55625935e-04, 1.36775123e-03, -2.08127408e-03, 2.52009045e-04, + -1.49716310e-04, -3.07585778e-04, 1.29094098e-04, -1.78327891e-04, + -3.55467543e-03, -1.03702801e-03, 5.01705916e-03, -1.46068291e-03, + 2.29931595e-03, -2.32968172e-03, 7.43670432e-04, -6.92986861e-04, + -1.00498143e-03, 2.95173070e-05, -1.53450925e-03, 3.21191049e-04, + -1.58863466e-03, 2.65016963e-03, -4.11708281e-04, -7.87924032e-04, + -8.33831423e-04, 1.07967881e-03, 2.63612367e-04, 9.87115809e-04, + 1.21380213e-03]], + + [[ 1.61478356e-02, 1.43728777e-02, 1.93974718e-03, -3.32623522e-03, + -1.17420191e-03, 3.21700718e-03, -8.75686341e-03, 1.37117517e-03, + -4.32829335e-03, -4.42773363e-03, 2.38329302e-03, -3.33902113e-03, + -4.12497515e-03, -6.89667443e-05, 2.41897821e-03, -4.26942050e-03, + -1.74191884e-03, 1.81570155e-03, 3.17867090e-03, -5.10889033e-03, + 4.49678511e-03, -2.60849304e-04, 1.84832760e-05, 4.61673598e-04, + -1.52379129e-03, 9.91300908e-04, 9.15433033e-04, 2.68803221e-03, + -2.02188630e-03, 2.73531219e-03, 3.98791591e-03, -1.49808319e-03, + 3.31828949e-04, 6.51188325e-04, -1.63160178e-03, 2.33668240e-03, + 2.93338642e-03, -5.06191672e-05, -1.03964494e-04, 3.87231978e-04, + -8.64134554e-04, 2.56989460e-03, -2.72089308e-03, -5.28755828e-04, + -2.52279382e-03, 1.50003583e-03, 2.73936763e-04, -7.08454308e-04, + 1.54615336e-03]], + + [[ 1.21073977e-02, 9.65647748e-03, 1.40513870e-03, 5.96978239e-03, + -5.83388001e-03, 1.18491063e-03, -7.42142705e-03, -1.61753221e-04, + 2.31758132e-03, -1.01991745e-02, 1.25833161e-03, -3.28820299e-03, + 9.77997070e-04, -3.33627171e-03, -7.64652162e-04, -4.00396819e-03, + -4.28747212e-03, 2.06971554e-03, 4.22446175e-03, 1.39004952e-03, + 5.11484010e-03, -2.51029606e-04, -1.53415978e-03, -9.05476137e-04, + -3.72103599e-03, 1.62807768e-03, 2.07196281e-03, 5.50544884e-03, + 3.46282883e-04, 3.54274790e-03, -1.15406623e-03, 1.05391112e-03, + -1.00296491e-03, 9.83314237e-04, -5.52227484e-04, -2.60564565e-04, + 3.63180002e-03, 1.15833727e-03, 2.31403574e-03, 6.13974106e-04, + -7.09616975e-04, -1.19250538e-04, -3.14249677e-03, -2.51516937e-04, + -7.67022089e-04, -3.86918453e-04, 7.29633770e-04, -1.56990164e-04, + 9.27285029e-04]]]]) + # 3D array with slices 2-7 with values from 1-6 (with borders of 0) ref_in_labels = np.zeros((10, 10, 10), dtype=np.uint16) for i in range(2, 8): diff --git a/scripts/scil_sh_to_aodf.py b/scripts/scil_sh_to_aodf.py new file mode 100644 index 0000000000..d5c79fe875 --- /dev/null +++ b/scripts/scil_sh_to_aodf.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Script to compute angle-aware bilateral filtering. + +Angle-aware bilateral filtering is an extension of bilateral filtering +considering the angular distance between sphere directions for filtering +5-dimensional spatio-angular images. + +The filtering can be performed on the GPU using pyopencl by specifying +--use_gpu. Make sure you have pyopencl installed to use this option. 
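+
+A typical GPU invocation looks like this (file names are hypothetical):
+
+    scil_sh_to_aodf.py fodf.nii.gz afodf.nii.gz \
+        --method bilateral --sigma_spatial 1.0 --use_gpu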
+
+Otherwise, the filtering runs entirely on the CPU.
+
+Using default parameters, fODF filtering for a HCP subject processed with
+Tractoflow takes about 12 minutes on the GPU versus 90 minutes using 16 CPU
+threads. The time required scales with the sigma_spatial parameter. For
+example, sigma_spatial=3.0 takes about 4.15 hours on the GPU versus 7.67 hours
+on the CPU using 16 threads. (CPU timings refer to the earlier multi-process
+implementation, removed when the filtering scripts were merged.)
+"""
+
+import argparse
+import logging
+import time
+import nibabel as nib
+import numpy as np
+
+from dipy.data import SPHERE_FILES
+from dipy.reconst.shm import sph_harm_ind_list
+from scilpy.reconst.utils import get_sh_order_and_fullness
+from scilpy.io.utils import (add_overwrite_arg, add_verbose_arg,
+                             assert_inputs_exist, add_sh_basis_args,
+                             assert_outputs_exist)
+from scilpy.denoise.asym_filtering import (cosine_filtering,
+                                           angle_aware_bilateral_filtering)
+
+
+EPILOG = """
+[1] Poirier et al, 2022, "Intuitive Angle-Aware Bilateral Filtering Revealing
+    Asymmetric Fiber ODF for Improved Tractography", ISMRM 2022 (abstract 3552)
+
+[2] Poirier et al, 2021, "Investigating the Occurrence of Asymmetric Patterns
+    in White Matter Fiber Orientation Distribution Functions", ISMRM 2021
+    (abstract 0865)
+"""
+
+
+def _build_arg_parser():
+    p = argparse.ArgumentParser(description=__doc__, epilog=EPILOG,
+                                formatter_class=argparse.RawTextHelpFormatter)
+    p.add_argument('in_sh',
+                   help='Path to the input file.')
+
+    p.add_argument('out_sh',
+                   help='File name for averaged signal.')
+
+    p.add_argument('--out_sym', default=None,
+                   help='Name of optional symmetric output. [%(default)s]')
+
+    add_sh_basis_args(p)
+
+    p.add_argument('--sphere', default='repulsion724',
+                   choices=sorted(SPHERE_FILES.keys()),
+                   help='Sphere used for the SH to SF projection. '
+                        '[%(default)s]')
+
+    p.add_argument('--method', default='bilateral',
+                   choices=['bilateral', 'cosine'],
+                   help='Method for estimating asymmetric ODFs '
+                        '[%(default)s].\nOne of:\n'
+                        '    \'bilateral\': Angle-aware bilateral '
+                        'filtering [1].\n'
+                        '    \'cosine\'   : Cosine-based filtering [2].')
+
+    shared_group = p.add_argument_group('Shared filter arguments')
+    shared_group.add_argument('--sigma_spatial', default=1.0, type=float,
+                              help='Standard deviation for spatial distance.'
+                                   ' [%(default)s]')
+
+    bilateral_group = p.add_argument_group('Angle-aware bilateral arguments')
+    bilateral_group.add_argument('--sigma_angular', default=1.0, type=float,
+                                 help='Standard deviation for angular '
+                                      'distance. [%(default)s]')
+    bilateral_group.add_argument('--sigma_range', default=1.0, type=float,
+                                 help='Standard deviation for range filter.'
+                                      ' [%(default)s]')
+
+    cosine_group = p.add_argument_group('Cosine filter arguments')
+    cosine_group.add_argument('--sharpness', default=1.0, type=float,
+                              help='Specify sharpness factor to use for'
+                                   ' weighted average.
[%(default)s]') + + p.add_argument('--use_gpu', action='store_true', + help='Use GPU for computation.') + + add_verbose_arg(p) + add_overwrite_arg(p) + return p + + +def main(): + parser = _build_arg_parser() + args = parser.parse_args() + + if args.verbose: + logging.getLogger().setLevel(logging.INFO) + + outputs = [args.out_sh] + if args.out_sym: + outputs.append(args.out_sym) + assert_outputs_exist(parser, args, outputs) + assert_inputs_exist(parser, args.in_sh) + + # Prepare data + sh_img = nib.load(args.in_sh) + data = sh_img.get_fdata(dtype=np.float32) + + sh_order, full_basis = get_sh_order_and_fullness(data.shape[-1]) + + t0 = time.perf_counter() + logging.info('Filtering SH image.') + if args.method == 'bilateral': + asym_sh = angle_aware_bilateral_filtering( + data, sh_order=sh_order, + sh_basis=args.sh_basis, + in_full_basis=full_basis, + sphere_str=args.sphere, + sigma_spatial=args.sigma_spatial, + sigma_angular=args.sigma_angular, + sigma_range=args.sigma_range, + use_gpu=args.use_gpu) + else: # args.method == 'cosine' + asym_sh = cosine_filtering( + data, sh_order=sh_order, + sh_basis=args.sh_basis, + in_full_basis=full_basis, + sphere_str=args.sphere, + dot_sharpness=args.sharpness, + sigma=args.sigma_spatial) + + t1 = time.perf_counter() + logging.info('Elapsed time (s): {0}'.format(t1 - t0)) + + logging.info('Saving filtered SH to file {0}.'.format(args.out_sh)) + nib.save(nib.Nifti1Image(asym_sh, sh_img.affine), args.out_sh) + + if args.out_sym: + _, orders = sph_harm_ind_list(sh_order, full_basis=True) + logging.info('Saving symmetric SH to file {0}.'.format(args.out_sym)) + nib.save(nib.Nifti1Image(asym_sh[..., orders % 2 == 0], sh_img.affine), + args.out_sym) + + +if __name__ == '__main__': + main() diff --git a/scripts/tests/test_execute_asymmetric_filtering.py b/scripts/tests/test_execute_asymmetric_filtering.py deleted file mode 100644 index 0f7a28f438..0000000000 --- a/scripts/tests/test_execute_asymmetric_filtering.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import os -import tempfile - -from scilpy.io.fetcher import get_testing_files_dict, fetch_data, get_home - - -# If they already exist, this only takes 5 seconds (check md5sum) -fetch_data(get_testing_files_dict(), keys=['processing.zip']) -tmp_dir = tempfile.TemporaryDirectory() - - -def test_help_option(script_runner): - ret = script_runner.run('scil_execute_asymmetric_filtering.py', - '--help') - assert ret.success - - -def test_asym_basis_output(script_runner): - os.chdir(os.path.expanduser(tmp_dir.name)) - in_fodf = os.path.join(get_home(), 'processing', - 'fodf_descoteaux07_sub.nii.gz') - - # We use a low resolution sphere to reduce execution time - ret = script_runner.run('scil_execute_asymmetric_filtering.py', in_fodf, - 'out_0.nii.gz', '--sphere', 'repulsion100') - assert ret.success - - -def test_sym_basis_output(script_runner): - os.chdir(os.path.expanduser(tmp_dir.name)) - in_fodf = os.path.join(get_home(), 'processing', - 'fodf_descoteaux07_sub.nii.gz') - - # We use a low resolution sphere to reduce execution time - ret = script_runner.run('scil_execute_asymmetric_filtering.py', in_fodf, - 'out_1.nii.gz', '--out_sym', 'out_sym.nii.gz', - '--sphere', 'repulsion100') - assert ret.success - - -def test_asym_input(script_runner): - os.chdir(os.path.expanduser(tmp_dir.name)) - in_fodf = os.path.join(get_home(), 'processing', - 'fodf_descoteaux07_sub_full.nii.gz') - - # We use a low resolution sphere to reduce execution time - ret = 
script_runner.run('scil_execute_asymmetric_filtering.py', in_fodf, - 'out_2.nii.gz', '--sphere', 'repulsion100', '-f') - assert ret.success diff --git a/scripts/tests/test_execute_angle_aware_bilateral_filtering.py b/scripts/tests/test_sh_to_aodf.py similarity index 73% rename from scripts/tests/test_execute_angle_aware_bilateral_filtering.py rename to scripts/tests/test_sh_to_aodf.py index 35b16becac..cca7984e6f 100644 --- a/scripts/tests/test_execute_angle_aware_bilateral_filtering.py +++ b/scripts/tests/test_sh_to_aodf.py @@ -22,15 +22,14 @@ def _mock(*args, **kwargs): img = nib.load(out_fodf) return img.get_fdata().astype(np.float32) - script = 'scil_execute_angle_aware_bilateral_filtering' + script = 'scil_sh_to_aodf' filtering_fn = "angle_aware_bilateral_filtering" return mocker.patch("scripts.{}.{}".format(script, filtering_fn), side_effect=_mock, create=True) def test_help_option(script_runner): - ret = script_runner.run('scil_execute_angle_aware_bilateral_filtering.py', - '--help') + ret = script_runner.run('scil_sh_to_aodf.py', '--help') assert ret.success @@ -40,15 +39,13 @@ def test_help_option(script_runner): def test_asym_basis_output(script_runner, mock_filtering, in_fodf, out_fodf): os.chdir(os.path.expanduser(tmp_dir.name)) - ret = script_runner.run('scil_execute_angle_aware_bilateral_filtering.py', - in_fodf, - 'out_fodf1.nii.gz', + ret = script_runner.run('scil_sh_to_aodf.py', + in_fodf, 'out_fodf1.nii.gz', '--sphere', 'repulsion100', '--sigma_angular', '1.0', '--sigma_spatial', '1.0', '--sigma_range', '1.0', - '--sh_basis', 'descoteaux07', - '--processes', '1', '-f', + '--sh_basis', 'descoteaux07', '-f', print_result=True, shell=True) assert ret.success @@ -67,7 +64,7 @@ def test_sym_basis_output( script_runner, mock_filtering, in_fodf, out_fodf, sym_fodf): os.chdir(os.path.expanduser(tmp_dir.name)) - ret = script_runner.run('scil_execute_angle_aware_bilateral_filtering.py', + ret = script_runner.run('scil_sh_to_aodf.py', in_fodf, 'out_fodf2.nii.gz', '--out_sym', 'out_sym.nii.gz', @@ -75,8 +72,7 @@ def test_sym_basis_output( '--sigma_angular', '1.0', '--sigma_spatial', '1.0', '--sigma_range', '1.0', - '--sh_basis', 'descoteaux07', - '--processes', '1', '-f', + '--sh_basis', 'descoteaux07', '-f', print_result=True, shell=True) assert ret.success @@ -93,15 +89,14 @@ def test_sym_basis_output( def test_asym_input(script_runner, mock_filtering, in_fodf, out_fodf): os.chdir(os.path.expanduser(tmp_dir.name)) - ret = script_runner.run('scil_execute_angle_aware_bilateral_filtering.py', + ret = script_runner.run('scil_sh_to_aodf.py', in_fodf, 'out_fodf3.nii.gz', '--sphere', 'repulsion100', '--sigma_angular', '1.0', '--sigma_spatial', '1.0', '--sigma_range', '1.0', - '--sh_basis', 'descoteaux07', - '--processes', '1', '-f', + '--sh_basis', 'descoteaux07', '-f', print_result=True, shell=True) assert ret.success @@ -110,3 +105,25 @@ def test_asym_input(script_runner, mock_filtering, in_fodf, out_fodf): ret_fodf = nib.load("out_fodf3.nii.gz") test_fodf = nib.load(out_fodf) assert np.allclose(ret_fodf.get_fdata(), test_fodf.get_fdata()) + + +@pytest.mark.parametrize("in_fodf,out_fodf", + [[os.path.join(data_path, 'fodf_descoteaux07_sub.nii.gz'), + os.path.join(data_path, 'fodf_descoteaux07_sub_full.nii.gz')]]) +def test_cosine_method(script_runner, mock_filtering, in_fodf, out_fodf): + os.chdir(os.path.expanduser(tmp_dir.name)) + + ret = script_runner.run('scil_sh_to_aodf.py', + in_fodf, 'out_fodf1.nii.gz', + '--sphere', 'repulsion100', + '--method', 'cosine', + '--sh_basis', 
'descoteaux07', + '-f', + print_result=True, shell=True) + + assert ret.success + mock_filtering.assert_not_called() + + ret_fodf = nib.load("out_fodf1.nii.gz") + test_fodf = nib.load(out_fodf) + assert not np.allclose(ret_fodf.get_fdata(), test_fodf.get_fdata()) From c7e4fb6223a2e3701891819bd6ba1d60abaf5fe5 Mon Sep 17 00:00:00 2001 From: CHrlS98 Date: Thu, 14 Dec 2023 11:42:28 -0500 Subject: [PATCH 04/63] Delete old scripts --- ...execute_angle_aware_bilateral_filtering.py | 131 ------------------ scripts/scil_execute_asymmetric_filtering.py | 111 --------------- 2 files changed, 242 deletions(-) delete mode 100755 scripts/scil_execute_angle_aware_bilateral_filtering.py delete mode 100755 scripts/scil_execute_asymmetric_filtering.py diff --git a/scripts/scil_execute_angle_aware_bilateral_filtering.py b/scripts/scil_execute_angle_aware_bilateral_filtering.py deleted file mode 100755 index c3f5f0d4d5..0000000000 --- a/scripts/scil_execute_angle_aware_bilateral_filtering.py +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -""" -Script to compute angle-aware bilateral filtering. - -Angle-aware bilateral filtering is an extension of bilateral filtering -considering the angular distance between sphere directions for filtering -5-dimensional spatio-angular images. - -The filtering can be performed on the GPU using pyopencl by specifying ---use_gpu. Make sure you have pyopencl installed to use this option. -Otherwise, the filtering also runs entirely on the CPU, optionally using -multiple processes. - -Using default parameters, fODF filtering for a HCP subject processed with -Tractoflow takes about 12 minutes on the GPU versus 90 minutes using 16 CPU -threads. The time required scales with the sigma_spatial parameter. For -example, sigma_spatial=3.0 takes about 4.15 hours on the GPU versus 7.67 hours -on the CPU using 16 threads. -""" - -import argparse -import logging -import time -import nibabel as nib -import numpy as np - -from dipy.data import SPHERE_FILES -from dipy.reconst.shm import sph_harm_ind_list -from scilpy.reconst.utils import get_sh_order_and_fullness -from scilpy.io.utils import (add_overwrite_arg, - add_processes_arg, - add_verbose_arg, - assert_inputs_exist, - add_sh_basis_args, - assert_outputs_exist, - validate_nbr_processes) -from scilpy.denoise.bilateral_filtering import angle_aware_bilateral_filtering - - -def _build_arg_parser(): - p = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawTextHelpFormatter) - - p.add_argument('in_sh', - help='Path to the input file.') - - p.add_argument('out_sh', - help='File name for averaged signal.') - - add_sh_basis_args(p) - - p.add_argument('--out_sym', default=None, - help='Name of optional symmetric output. [%(default)s]') - - p.add_argument('--sphere', default='repulsion724', - choices=sorted(SPHERE_FILES.keys()), - help='Sphere used for the SH to SF projection. ' - '[%(default)s]') - - p.add_argument('--sigma_angular', default=1.0, type=float, - help='Standard deviation for angular distance.' - ' [%(default)s]') - - p.add_argument('--sigma_spatial', default=1.0, type=float, - help='Standard deviation for spatial distance.' - ' [%(default)s]') - - p.add_argument('--sigma_range', default=1.0, type=float, - help='Standard deviation for range filter.' 
- ' [%(default)s]') - - p.add_argument('--use_gpu', action='store_true', - help='Use GPU for computation.') - - add_verbose_arg(p) - add_overwrite_arg(p) - add_processes_arg(p) - - return p - - -def main(): - parser = _build_arg_parser() - args = parser.parse_args() - if args.verbose: - logging.getLogger().setLevel(logging.INFO) - - # Checking args - outputs = [args.out_sh] - if args.out_sym: - outputs.append(args.out_sym) - assert_outputs_exist(parser, args, outputs) - assert_inputs_exist(parser, args.in_sh) - - nbr_processes = validate_nbr_processes(parser, args) - - # Prepare data - sh_img = nib.load(args.in_sh) - data = sh_img.get_fdata(dtype=np.float32) - - sh_order, full_basis = get_sh_order_and_fullness(data.shape[-1]) - - t0 = time.perf_counter() - logging.info('Executing angle-aware bilateral filtering.') - asym_sh = angle_aware_bilateral_filtering( - data, sh_order=sh_order, - sh_basis=args.sh_basis, - in_full_basis=full_basis, - sphere_str=args.sphere, - sigma_spatial=args.sigma_spatial, - sigma_angular=args.sigma_angular, - sigma_range=args.sigma_range, - use_gpu=args.use_gpu, - nbr_processes=nbr_processes) - t1 = time.perf_counter() - logging.info('Elapsed time (s): {0}'.format(t1 - t0)) - - logging.info('Saving filtered SH to file {0}.'.format(args.out_sh)) - nib.save(nib.Nifti1Image(asym_sh, sh_img.affine), args.out_sh) - - if args.out_sym: - _, orders = sph_harm_ind_list(sh_order, full_basis=True) - logging.info('Saving symmetric SH to file {0}.'.format(args.out_sym)) - nib.save(nib.Nifti1Image(asym_sh[..., orders % 2 == 0], sh_img.affine), - args.out_sym) - - -if __name__ == "__main__": - main() diff --git a/scripts/scil_execute_asymmetric_filtering.py b/scripts/scil_execute_asymmetric_filtering.py deleted file mode 100755 index 337061128b..0000000000 --- a/scripts/scil_execute_asymmetric_filtering.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -""" -Script to compute per-vertices hemisphere-aware (asymmetric) -filtering of spherical functions (SF) given an array of spherical harmonics -(SH) coefficients. SF are filtered using a first-neighbor Gaussian filter. -Sphere directions are also weighted by their dot product with the direction -to the center of each neighbor, clipping to 0 negative values. - -The argument `sigma` controls the standard deviation of the Gaussian. The -argument `sharpness` controls the exponent of the cosine weights. The higher it -is, the faster the weights of misaligned sphere directions decrease. A -sharpness of 0 gives the same weight to all sphere directions in an hemisphere. -Both `sharpness` and `sigma` must be positive. - -The resulting SF can be expressed using a full SH basis or a symmetric SH basis -(where the effect of the filtering is a simple denoising). - -Using default parameters, the script completes in about 15-20 minutes for a -HCP subject fiber ODF processed with tractoflow. Also note the bigger the -sphere used for SH to SF projection, the higher the RAM consumption and -compute time. 
-""" - -import argparse -import logging -import nibabel as nib -import numpy as np - -from dipy.reconst.shm import sph_harm_ind_list -from dipy.data import SPHERE_FILES -from scilpy.reconst.utils import get_sh_order_and_fullness -from scilpy.io.utils import (add_overwrite_arg, - add_verbose_arg, - assert_inputs_exist, - add_sh_basis_args, - assert_outputs_exist) -from scilpy.denoise.asym_averaging import local_asym_filtering - - -def _build_arg_parser(): - p = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawTextHelpFormatter) - - p.add_argument('in_sh', - help='Path to the input file.') - - p.add_argument('out_sh', - help='File name for averaged signal.') - - p.add_argument('--out_sym', default=None, - help='Name of optional symmetric output. [%(default)s]') - - add_sh_basis_args(p) - - p.add_argument('--sphere', default='repulsion724', - choices=sorted(SPHERE_FILES.keys()), - help='Sphere used for the SH to SF projection. ' - '[%(default)s]') - - p.add_argument('--sharpness', default=1.0, type=float, - help='Specify sharpness factor to use for weighted average.' - ' [%(default)s]') - - p.add_argument('--sigma', default=1.0, type=float, - help='Sigma of the gaussian to use. [%(default)s]') - - add_verbose_arg(p) - add_overwrite_arg(p) - - return p - - -def main(): - parser = _build_arg_parser() - args = parser.parse_args() - if args.verbose: - logging.getLogger().setLevel(logging.INFO) - - # Checking args - assert_outputs_exist(parser, args, args.out_sh) - assert_inputs_exist(parser, args.in_sh) - - # Prepare data - sh_img = nib.load(args.in_sh) - data = sh_img.get_fdata(dtype=np.float32) - - sh_order, full_basis = get_sh_order_and_fullness(data.shape[-1]) - - logging.info('Executing local asymmetric filtering.') - filtered_sh = local_asym_filtering( - data, sh_order=sh_order, - sh_basis=args.sh_basis, - in_full_basis=full_basis, - sphere_str=args.sphere, - dot_sharpness=args.sharpness, - sigma=args.sigma) - - logging.info('Saving filtered SH to file {0}.'.format(args.out_sh)) - nib.save(nib.Nifti1Image(filtered_sh, sh_img.affine), args.out_sh) - - if args.out_sym: - _, orders = sph_harm_ind_list(sh_order, full_basis=True) - logging.info('Saving symmetric SH to file {0}.'.format(args.out_sym)) - nib.save(nib.Nifti1Image(filtered_sh[..., orders % 2 == 0], - sh_img.affine), args.out_sym) - - -if __name__ == "__main__": - main() From 81b3c1e7f50f9bbb6ecc8376a92ea8b644009c0f Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Thu, 14 Dec 2023 11:50:01 -0500 Subject: [PATCH 05/63] update dipy 1.8 --- requirements.txt | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index 4122bc8a4f..39301a8966 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ coloredlogs==15.0.* cvxpy==1.3.* cycler==0.11.* Cython==0.29.*, !=0.29.29 -#dipy==1.7.* +dipy==1.8.* deepdiff==6.3.0 dmri-amico==1.5.* dmri-commit==1.6.* @@ -42,6 +42,4 @@ trimeshpy==0.0.3 vtk==9.2.* # Dipy requirements h5py>=2.8.0 -tqdm>=4.30.0 - --e git+https://github.com/scilus/hot_dipy@1.8.0.dev0#egg=dipy \ No newline at end of file +tqdm>=4.30.0 \ No newline at end of file From fe17405b014186f9cf00cbd8c8306f326111e6db Mon Sep 17 00:00:00 2001 From: AntoineTheb Date: Thu, 14 Dec 2023 11:50:24 -0500 Subject: [PATCH 06/63] ENH: merge gradients convert --- scilpy/gradients/bvec_bval_tools.py | 24 ++--- scilpy/io/utils.py | 14 +-- .../scil_convert_gradients_fsl_to_mrtrix.py | 3 +- .../scil_convert_gradients_mrtrix_to_fsl.py | 3 +- 
 scripts/scil_gradients_convert.py | 68 +++++++++++++
 .../scil_gradients_convert_fsl_to_mrtrix.py | 53 ----------
 .../scil_gradients_convert_mrtrix_to_fsl.py | 50 ----------
 scripts/scil_visualize_bundles.py | 2 +-
 scripts/tests/test_gradients_convert.py | 97 +++++++++++++++++++
 .../test_gradients_convert_fsl_to_mrtrix.py | 9 ++
 10 files changed, 196 insertions(+), 127 deletions(-)
 create mode 100755 scripts/scil_gradients_convert.py
 delete mode 100755 scripts/scil_gradients_convert_fsl_to_mrtrix.py
 delete mode 100755 scripts/scil_gradients_convert_mrtrix_to_fsl.py
 create mode 100644 scripts/tests/test_gradients_convert.py

diff --git a/scilpy/gradients/bvec_bval_tools.py b/scilpy/gradients/bvec_bval_tools.py
index 29dbcb6e77..88c13f61e4 100644
--- a/scilpy/gradients/bvec_bval_tools.py
+++ b/scilpy/gradients/bvec_bval_tools.py
@@ -141,6 +141,10 @@ def fsl2mrtrix(fsl_bval_filename, fsl_bvec_filename, mrtrix_filename):
     points = np.loadtxt(fsl_bvec_filename)
     bvals = np.unique(shells).tolist()

+    # Remove the .b extension if present; it is appended back on save
+    if mrtrix_filename.endswith('.b'):
+        mrtrix_filename = mrtrix_filename[:-2]
+
     if not points.shape[0] == 3:
         points = points.transpose()
         logging.warning('WARNING: Your bvecs seem transposed. ' +
@@ -150,11 +154,10 @@ def fsl2mrtrix(fsl_bval_filename, fsl_bvec_filename, mrtrix_filename):
     save_gradient_sampling_mrtrix(points,
                                   shell_idx,
                                   bvals,
-                                  mrtrix_filename)
+                                  mrtrix_filename + '.b')


-def mrtrix2fsl(mrtrix_filename, fsl_bval_filename=None,
-               fsl_bvec_filename=None):
+def mrtrix2fsl(mrtrix_filename, fsl_filename):
     """
     Convert a mrtrix encoding.b file to fsl dir_grad.bvec/.bval files.

@@ -163,14 +166,12 @@ def mrtrix2fsl(mrtrix_filename, fsl_filename):
     mrtrix_filename : str
         path to mrtrix encoding.b file.
-    fsl_bval_filename: str
-        path to the output fsl bval file. Default is
-        mrtrix_filename.bval.
-    fsl_bvec_filename: str
-        path to the output fsl bvec file. Default is
-        mrtrix_filename.bvec.
-    Returns
-    -------
+    fsl_filename: str
+        path to the output fsl files. Files will be named
+        fsl_filename.bval and fsl_filename.bvec.
     """
+    # Remove the .bval or .bvec extension if present
+    if fsl_filename.endswith('.bval') or fsl_filename.endswith('.bvec'):
+        fsl_filename = fsl_filename[:-5]
     mrtrix_b = np.loadtxt(mrtrix_filename)
     if not len(mrtrix_b.shape) == 2 or not mrtrix_b.shape[1] == 4:
@@ -185,8 +186,8 @@ def mrtrix2fsl(mrtrix_filename, fsl_filename):
     save_gradient_sampling_fsl(points,
                                shell_idx,
                                bvals,
-                               filename_bval=fsl_bval_filename,
-                               filename_bvec=fsl_bvec_filename)
+                               filename_bval=fsl_filename + '.bval',
+                               filename_bvec=fsl_filename + '.bvec')


 def identify_shells(bvals, threshold=40.0, roundCentroids=False, sort=False):
diff --git a/scilpy/io/utils.py b/scilpy/io/utils.py
index 90c8aa6165..64b3d6360a 100644
--- a/scilpy/io/utils.py
+++ b/scilpy/io/utils.py
@@ -112,7 +112,7 @@ def check_tracts_same_format(parser, filename_list):
         parser.error('All tracts file must use the same format.')


-def assert_gradients_filenames_valid(parser, filename_list, gradient_format):
+def assert_gradients_filenames_valid(parser, filename_list, input_is_fsl):
     """
     Validate if gradients filenames follow BIDS or MRtrix convention

@@ -122,11 +122,9 @@ def assert_gradients_filenames_valid(parser, filename_list, input_is_fsl):
         Parser.
     filename_list: list
         list of gradient paths.
-    gradient_format : str
-        Can be either fsl or mrtrix.
+    input_is_fsl: bool
+        Whether the input is in FSL format or MRtrix format.
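+
+    For example, ['dwi.bval', 'dwi.bvec'] is a valid filename_list when
+    input_is_fsl is True, and ['dwi.b'] when it is False (file names here
+    are only illustrative).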
- Returns - ------- """ valid_fsl_extensions = ['.bval', '.bvec'] @@ -135,7 +133,7 @@ def assert_gradients_filenames_valid(parser, filename_list, gradient_format): if isinstance(filename_list, str): filename_list = [filename_list] - if gradient_format == 'fsl': + if input_is_fsl: if len(filename_list) == 2: filename_1 = filename_list[0] filename_2 = filename_list[1] @@ -158,7 +156,7 @@ def assert_gradients_filenames_valid(parser, filename_list, gradient_format): else: parser.error('You should have two files for fsl format.') - elif gradient_format == 'mrtrix': + else: if len(filename_list) == 1: curr_filename = filename_list[0] basename, ext = os.path.splitext(curr_filename) @@ -167,8 +165,6 @@ def assert_gradients_filenames_valid(parser, filename_list, gradient_format): 'valid for mrtrix format.'.format(basename, ext)) else: parser.error('You should have one file for mrtrix format.') - else: - parser.error('Gradient file format should be either fsl or mrtrix.') def add_json_args(parser): diff --git a/scripts/legacy/scil_convert_gradients_fsl_to_mrtrix.py b/scripts/legacy/scil_convert_gradients_fsl_to_mrtrix.py index 15791c130b..33592e88fd 100755 --- a/scripts/legacy/scil_convert_gradients_fsl_to_mrtrix.py +++ b/scripts/legacy/scil_convert_gradients_fsl_to_mrtrix.py @@ -6,7 +6,8 @@ DEPRECATION_MSG = """ -This script has been renamed scil_gradients_convert_fsl_to_mrtrix.py. +This script has been merged with scil_gradients_convert_mrtrix_to_fsl.py +and renamed scil_gradients_convert.py. Please change your existing pipelines accordingly. """ diff --git a/scripts/legacy/scil_convert_gradients_mrtrix_to_fsl.py b/scripts/legacy/scil_convert_gradients_mrtrix_to_fsl.py index 26611a7b2a..0318cbd848 100755 --- a/scripts/legacy/scil_convert_gradients_mrtrix_to_fsl.py +++ b/scripts/legacy/scil_convert_gradients_mrtrix_to_fsl.py @@ -6,7 +6,8 @@ DEPRECATION_MSG = """ -This script has been renamed scil_gradients_convert_mrtrix_to_fsl.py. +This script has been merged with scil_gradients_convert_fsl_to_mrtrix.py +and renamed scil_gradients_convert.py. Please change your existing pipelines accordingly. """ diff --git a/scripts/scil_gradients_convert.py b/scripts/scil_gradients_convert.py new file mode 100755 index 0000000000..722267f105 --- /dev/null +++ b/scripts/scil_gradients_convert.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Script to convert gradient tables between FSL and MRtrix formats. + + +""" + +import argparse +import logging + +from scilpy.io.utils import (assert_gradients_filenames_valid, + assert_inputs_exist, assert_outputs_exist, + add_overwrite_arg, add_verbose_arg) +from scilpy.gradients.bvec_bval_tools import fsl2mrtrix, mrtrix2fsl + + +def _build_arg_parser(): + p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, + description=__doc__) + + p.add_argument('gradients', nargs='+', metavar='GRADIENT_FILE(S)', + help='Path(s) to the gradient file(s). Either FSL ' + '(.bval, .bvec) or MRtrix (.b).') + + p.add_argument('output', type=str, + help='Path to output file(s) without extension. 
Either ' + 'FSL (output.bval, output.bvec) or MRtrix (output.b).') + + grad_format_group = p.add_mutually_exclusive_group(required=True) + grad_format_group.add_argument('--input_fsl', action='store_true', + help='FSL format.') + grad_format_group.add_argument('--input_mrtrix', action='store_true', + help='MRtrix format.') + + add_overwrite_arg(p) + add_verbose_arg(p) + + return p + + +def main(): + parser = _build_arg_parser() + args = parser.parse_args() + + if args.verbose: + logging.getLogger().setLevel(logging.INFO) + + input_is_fsl = args.input_fsl + + assert_gradients_filenames_valid(parser, args.gradients, input_is_fsl) + assert_inputs_exist(parser, args.gradients) + + if not input_is_fsl: + output = [args.output + '.bval', args.output + '.bvec'] + assert_outputs_exist(parser, args, output[0], output[1]) + mrtrix_b = args.gradients[0] + mrtrix2fsl(mrtrix_b, args.output) + else: + output = args.output + '.b' + assert_outputs_exist(parser, args, output) + fsl_bval, fsl_bvec = args.gradients + fsl2mrtrix(fsl_bval, fsl_bvec, args.output) + + +if __name__ == "__main__": + main() diff --git a/scripts/scil_gradients_convert_fsl_to_mrtrix.py b/scripts/scil_gradients_convert_fsl_to_mrtrix.py deleted file mode 100755 index 20fc751a72..0000000000 --- a/scripts/scil_gradients_convert_fsl_to_mrtrix.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -""" -Script to convert bval/bvec FSL style to MRtrix style. -""" - -import argparse -import logging - -from scilpy.io.utils import (assert_gradients_filenames_valid, - assert_inputs_exist, assert_outputs_exist, - add_overwrite_arg, add_verbose_arg) -from scilpy.gradients.bvec_bval_tools import fsl2mrtrix - - -def _build_arg_parser(): - p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, - description=__doc__) - - p.add_argument('fsl_bval', - help='Path to FSL b-value file (.bval).') - - p.add_argument('fsl_bvec', - help='Path to FSL gradient directions file (.bvec).') - - p.add_argument('mrtrix_enc', - help='Path to gradient directions encoding file (.b).') - - add_overwrite_arg(p) - add_verbose_arg(p) - - return p - - -def main(): - parser = _build_arg_parser() - args = parser.parse_args() - - if args.verbose: - logging.getLogger().setLevel(logging.INFO) - - assert_gradients_filenames_valid(parser, [args.fsl_bval, args.fsl_bvec], - 'fsl') - assert_gradients_filenames_valid(parser, args.mrtrix_enc, 'mrtrix') - assert_inputs_exist(parser, [args.fsl_bval, args.fsl_bvec]) - assert_outputs_exist(parser, args, args.mrtrix_enc) - - fsl2mrtrix(args.fsl_bval, args.fsl_bvec, args.mrtrix_enc) - - -if __name__ == "__main__": - main() diff --git a/scripts/scil_gradients_convert_mrtrix_to_fsl.py b/scripts/scil_gradients_convert_mrtrix_to_fsl.py deleted file mode 100755 index 8f8a2463df..0000000000 --- a/scripts/scil_gradients_convert_mrtrix_to_fsl.py +++ /dev/null @@ -1,50 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -""" -Script to convert bval/bvec MRtrix style to FSL style. -""" - -import argparse -import logging - -from scilpy.io.utils import (add_overwrite_arg, - add_verbose_arg, - assert_gradients_filenames_valid, - assert_outputs_exist) -from scilpy.gradients.bvec_bval_tools import mrtrix2fsl - - -def _build_arg_parser(): - p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, - description=__doc__) - - p.add_argument('mrtrix_enc', - help='Path to the gradient directions encoding file. 
(.b)') - p.add_argument('fsl_bval', - help='Path to output FSL b-value file (.bval).') - p.add_argument('fsl_bvec', - help='Path to output FSL gradient directions file (.bvec).') - - add_overwrite_arg(p) - add_verbose_arg(p) - - return p - - -def main(): - parser = _build_arg_parser() - args = parser.parse_args() - - if args.verbose: - logging.getLogger().setLevel(logging.INFO) - - assert_gradients_filenames_valid(parser, args.mrtrix_enc, 'mrtrix') - assert_gradients_filenames_valid(parser, [args.fsl_bval, args.fsl_bvec], - 'fsl') - assert_outputs_exist(parser, args, [args.fsl_bval, args.fsl_bvec]) - - mrtrix2fsl(args.mrtrix_enc, args.fsl_bval, args.fsl_bvec) - - -if __name__ == "__main__": - main() diff --git a/scripts/scil_visualize_bundles.py b/scripts/scil_visualize_bundles.py index e468866dd1..7352cc6785 100755 --- a/scripts/scil_visualize_bundles.py +++ b/scripts/scil_visualize_bundles.py @@ -177,7 +177,7 @@ def subsample(list_obj): color = subsample( tractogram_gen.data_per_point[args.color_from_points]) elif args.uniform_coloring: # Assign uniform coloring to streamlines - color = tuple(np.asarray(args.uniform_coloring) / 255) + color = tuple(np.asarray(args.uniform_coloring + [5]) / 255) elif args.local_coloring: # Compute coloring from local orientations # Compute segment orientation diff = [np.diff(list(s), axis=0) for s in streamlines] diff --git a/scripts/tests/test_gradients_convert.py b/scripts/tests/test_gradients_convert.py new file mode 100644 index 0000000000..a01e30065f --- /dev/null +++ b/scripts/tests/test_gradients_convert.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import os +import tempfile + +from scilpy.io.fetcher import fetch_data, get_home, get_testing_files_dict + +# If they already exist, this only takes 5 seconds (check md5sum) +fetch_data(get_testing_files_dict(), keys=['processing.zip']) +tmp_dir = tempfile.TemporaryDirectory() + + +def test_help_option(script_runner): + ret = script_runner.run('scil_gradients_convert.py', + '--help') + assert ret.success + + +def test_execution_processing_fsl(script_runner): + os.chdir(os.path.expanduser(tmp_dir.name)) + in_bval = os.path.join(get_home(), 'processing', + '1000.bval') + in_bvec = os.path.join(get_home(), 'processing', + '1000.bvec') + ret = script_runner.run('scil_gradients_convert.py', + '--input_fsl', + in_bval, in_bvec, '1000') + assert ret.success + + +def test_execution_processing_mrtrix(script_runner): + os.chdir(os.path.expanduser(tmp_dir.name)) + in_encoding = os.path.join(get_home(), 'processing', + '1000.b') + ret = script_runner.run('scil_gradients_convert.py', + '--input_mrtrix', + in_encoding, '1000') + assert ret.success + + +def test_name_validation_mrtrix(script_runner): + os.chdir(os.path.expanduser(tmp_dir.name)) + in_bval = os.path.join(get_home(), 'processing', + '1000.bval') + in_bvec = os.path.join(get_home(), 'processing', + '1000.bvec') + ret = script_runner.run('scil_gradients_convert.py', + '--input_fsl', + in_bval, in_bvec, '1000_test.b') + assert ret.success + + wrong_path = os.path.join(tmp_dir.name, '1000_test.b.b') + assert not os.path.isfile(wrong_path) + + right_path = os.path.join(tmp_dir.name, '1000_test.b') + assert os.path.isfile(right_path) + + +def test_name_validation_fsl_bval(script_runner): + os.chdir(os.path.expanduser(tmp_dir.name)) + in_encoding = os.path.join(get_home(), 'processing', + '1000.b') + ret = script_runner.run('scil_gradients_convert.py', + '--input_mrtrix', + in_encoding, '1000_test.bval') + assert ret.success + + 
wrong_path_bval = os.path.join(tmp_dir.name, '1000_test.bval.bval') + assert not os.path.isfile(wrong_path_bval) + wrong_path_bvec = os.path.join(tmp_dir.name, '1000_test.bval.bvec') + assert not os.path.isfile(wrong_path_bvec) + + right_path_bval = os.path.join(tmp_dir.name, '1000_test.bval') + assert os.path.isfile(right_path_bval) + right_path_bvec = os.path.join(tmp_dir.name, '1000_test.bvec') + assert os.path.isfile(right_path_bvec) + + +def test_name_validation_fsl_bvec(script_runner): + os.chdir(os.path.expanduser(tmp_dir.name)) + in_encoding = os.path.join(get_home(), 'processing', + '1000.b') + ret = script_runner.run('scil_gradients_convert.py', + '--input_mrtrix', + in_encoding, '1000_test.bvec') + assert ret.success + + wrong_path_bval = os.path.join(tmp_dir.name, '1000_test.bvec.bval') + assert not os.path.isfile(wrong_path_bval) + wrong_path_bvec = os.path.join(tmp_dir.name, '1000_test.bvec.bvec') + assert not os.path.isfile(wrong_path_bvec) + + right_path_bval = os.path.join(tmp_dir.name, '1000_test.bval') + assert os.path.isfile(right_path_bval) + right_path_bvec = os.path.join(tmp_dir.name, '1000_test.bvec') + assert os.path.isfile(right_path_bvec) diff --git a/scripts/tests/test_gradients_convert_fsl_to_mrtrix.py b/scripts/tests/test_gradients_convert_fsl_to_mrtrix.py index a3737f40ee..6278bc31a8 100644 --- a/scripts/tests/test_gradients_convert_fsl_to_mrtrix.py +++ b/scripts/tests/test_gradients_convert_fsl_to_mrtrix.py @@ -26,3 +26,12 @@ def test_execution_processing(script_runner): ret = script_runner.run('scil_gradients_convert_fsl_to_mrtrix.py', in_bval, in_bvec, '1000.b') assert ret.success + + +def test_execution_processing(script_runner): + os.chdir(os.path.expanduser(tmp_dir.name)) + in_encoding = os.path.join(get_home(), 'processing', + '1000.b') + ret = script_runner.run('scil_gradients_convert_mrtrix_to_fsl.py', + in_encoding, '1000.bval', '1000.bvec') + assert ret.success From 32e23b030c1b3671ac5415443151cf292b8ac326 Mon Sep 17 00:00:00 2001 From: AntoineTheb Date: Thu, 14 Dec 2023 11:54:21 -0500 Subject: [PATCH 07/63] FIX: remove irrelevant file --- scripts/scil_visualize_bundles.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/scil_visualize_bundles.py b/scripts/scil_visualize_bundles.py index 7352cc6785..e468866dd1 100755 --- a/scripts/scil_visualize_bundles.py +++ b/scripts/scil_visualize_bundles.py @@ -177,7 +177,7 @@ def subsample(list_obj): color = subsample( tractogram_gen.data_per_point[args.color_from_points]) elif args.uniform_coloring: # Assign uniform coloring to streamlines - color = tuple(np.asarray(args.uniform_coloring + [5]) / 255) + color = tuple(np.asarray(args.uniform_coloring) / 255) elif args.local_coloring: # Compute coloring from local orientations # Compute segment orientation diff = [np.diff(list(s), axis=0) for s in streamlines] From ad2e6d151264ea8bfbcc2385134d635242e16d4f Mon Sep 17 00:00:00 2001 From: CHrlS98 Date: Thu, 14 Dec 2023 11:57:15 -0500 Subject: [PATCH 08/63] Update documentation --- scripts/scil_sh_to_aodf.py | 31 +++++++++++++++---------------- scripts/tests/test_sh_to_aodf.py | 5 +++++ 2 files changed, 20 insertions(+), 16 deletions(-) diff --git a/scripts/scil_sh_to_aodf.py b/scripts/scil_sh_to_aodf.py index d5c79fe875..3bedb78851 100644 --- a/scripts/scil_sh_to_aodf.py +++ b/scripts/scil_sh_to_aodf.py @@ -1,22 +1,18 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Script to compute angle-aware bilateral filtering. 
- -Angle-aware bilateral filtering is an extension of bilateral filtering -considering the angular distance between sphere directions for filtering -5-dimensional spatio-angular images. - -The filtering can be performed on the GPU using pyopencl by specifying ---use_gpu. Make sure you have pyopencl installed to use this option. -Otherwise, the filtering also runs entirely on the CPU, optionally using -multiple processes. - -Using default parameters, fODF filtering for a HCP subject processed with -Tractoflow takes about 12 minutes on the GPU versus 90 minutes using 16 CPU -threads. The time required scales with the sigma_spatial parameter. For -example, sigma_spatial=3.0 takes about 4.15 hours on the GPU versus 7.67 hours -on the CPU using 16 threads. +Script to estimate asymmetric ODFs (aODFs) from a spherical harmonics image. + +Two methods are available: + * Angle-aware bilateral filtering [1] is an extension of bilateral + filtering considering the angular distance between sphere directions + for filtering 5-dimensional spatio-angular images. + * Cosine filtering [2] is a simpler implementation using cosine distance + for assigning weights to neighbours. + +Angle-aware bilateral filtering can be performed on the GPU using pyopencl by +specifying --use_gpu. Make sure you have pyopencl installed to use this option. +Otherwise, the filtering runs entirely on the CPU. """ import argparse @@ -105,6 +101,9 @@ def main(): if args.verbose: logging.getLogger().setLevel(logging.INFO) + if args.use_gpu and args.method == 'cosine': + parser.error('Option --use_gpu is not supported for cosine filtering.') + outputs = [args.out_sh] if args.out_sym: outputs.append(args.out_sym) diff --git a/scripts/tests/test_sh_to_aodf.py b/scripts/tests/test_sh_to_aodf.py index cca7984e6f..a5d9eb469a 100644 --- a/scripts/tests/test_sh_to_aodf.py +++ b/scripts/tests/test_sh_to_aodf.py @@ -122,8 +122,13 @@ def test_cosine_method(script_runner, mock_filtering, in_fodf, out_fodf): print_result=True, shell=True) assert ret.success + + # method cosine is fast and not mocked mock_filtering.assert_not_called() ret_fodf = nib.load("out_fodf1.nii.gz") test_fodf = nib.load(out_fodf) + + # We expect the output to be different from the + # one obtained with angle-aware bilateral filtering assert not np.allclose(ret_fodf.get_fdata(), test_fodf.get_fdata()) From 57a780ad0b8395bba0a9efb159dabb306de7d505 Mon Sep 17 00:00:00 2001 From: frheault Date: Thu, 14 Dec 2023 12:00:59 -0500 Subject: [PATCH 09/63] Improved docstring and renaming --- ...y => scil_json_convert_entries_to_xlsx.py} | 4 ++-- ...json.py => scil_json_harmonize_entries.py} | 10 ++++++--- ...rge_json.py => scil_json_merge_entries.py} | 22 ++++++++++++++++++- ...y => test_json_convert_entries_to_xlsx.py} | 4 ++-- scripts/tests/test_json_harmonize_entries.py | 6 +++++ ...rge_json.py => test_json_merge_entries.py} | 4 ++-- 6 files changed, 40 insertions(+), 10 deletions(-) rename scripts/{scil_convert_json_to_xlsx.py => scil_json_convert_entries_to_xlsx.py} (99%) rename scripts/{scil_harmonize_json.py => scil_json_harmonize_entries.py} (84%) rename scripts/{scil_merge_json.py => scil_json_merge_entries.py} (76%) rename scripts/tests/{test_convert_json_to_xlsx.py => test_json_convert_entries_to_xlsx.py} (80%) create mode 100644 scripts/tests/test_json_harmonize_entries.py rename scripts/tests/{test_merge_json.py => test_json_merge_entries.py} (85%) diff --git a/scripts/scil_convert_json_to_xlsx.py b/scripts/scil_json_convert_entries_to_xlsx.py similarity index 99% rename 
from scripts/scil_convert_json_to_xlsx.py rename to scripts/scil_json_convert_entries_to_xlsx.py index 2c48e04ef6..879e1ccd2d 100755 --- a/scripts/scil_convert_json_to_xlsx.py +++ b/scripts/scil_json_convert_entries_to_xlsx.py @@ -1,8 +1,8 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -""" Convert a final aggregated json file to an Excel spreadsheet. Typically -used during the tractometry pipeline. +""" Convert a final aggregated json file to an Excel spreadsheet. +Typically used during the tractometry pipeline. """ import argparse diff --git a/scripts/scil_harmonize_json.py b/scripts/scil_json_harmonize_entries.py similarity index 84% rename from scripts/scil_harmonize_json.py rename to scripts/scil_json_harmonize_entries.py index 03300f3c5e..a735089857 100755 --- a/scripts/scil_harmonize_json.py +++ b/scripts/scil_json_harmonize_entries.py @@ -1,12 +1,16 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -""" -This script will harmonize a json file by adding missing keys and values that -differs between the different layers of the dictionary. +""" This script will harmonize a json file by adding missing keys and values +that differ between the different layers of the dictionary. This is used only (for now) in Aggregate_All_* portion of tractometry-flow, to counter the problem of missing bundles/metrics/lesions between subjects. + +The most common use case is when specific subjects have missing bundles +which will cause a pandas array to be incomplete, and thus crash. Finding out +the union of all bundles/metrics/lesions will allow creating a complete json +(but with NaN for missing values). """ import argparse diff --git a/scripts/scil_merge_json.py b/scripts/scil_json_merge_entries.py similarity index 76% rename from scripts/scil_merge_json.py rename to scripts/scil_json_merge_entries.py index 81db336436..7f7e624f57 100755 --- a/scripts/scil_merge_json.py +++ b/scripts/scil_json_merge_entries.py @@ -2,8 +2,28 @@ # -*- coding: utf-8 -*- """ Merge multiple json files into a single one. -the --keep_separate option will add an entry for each file, the basename will +Typically used during the tractometry pipeline. + +Without options, it will simply merge all entries at the top level; the top +level must not have any conflicting keys. + +--keep_separate option will add a parent for each file, its basename will become the key. + +--no_list option will merge all entries at the top level; if there is a +conflict, the lowest level will be extended with the new values (if a list) or +added (if a value). + +--add_parent_key option will add a parent key before merging all entries. + +--remove_parent_key option will remove the parent key before merging all +entries. + +--recursive option will merge all entries (scalar) at the lowest layers as a +list. + +--average_last_layer option will average all entries (scalar) at the lowest +layers, but instead of creating a list it creates a mean/std level.
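To make the option semantics above concrete, here is a small illustrative sketch (an assumption about the intended behaviour, not code from this patch) of what --average_last_layer does to scalar leaves collected across files:

import numpy as np


def average_last_layer(values):
    # values: scalars gathered across input files for one leaf key.
    return {'mean': float(np.mean(values)), 'std': float(np.std(values))}


# {'AF_L': {'length': 120.0}} merged with {'AF_L': {'length': 124.0}}
# would become {'AF_L': {'length': {'mean': 122.0, 'std': 2.0}}}
print(average_last_layer([120.0, 124.0]))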
""" import argparse diff --git a/scripts/tests/test_convert_json_to_xlsx.py b/scripts/tests/test_json_convert_entries_to_xlsx.py similarity index 80% rename from scripts/tests/test_convert_json_to_xlsx.py rename to scripts/tests/test_json_convert_entries_to_xlsx.py index 635c6b6aa8..0edf98775c 100644 --- a/scripts/tests/test_convert_json_to_xlsx.py +++ b/scripts/tests/test_json_convert_entries_to_xlsx.py @@ -13,7 +13,7 @@ def test_help_option(script_runner): - ret = script_runner.run('scil_convert_json_to_xlsx.py', '--help') + ret = script_runner.run('scil_json_convert_entries_to_xlsx.py', '--help') assert ret.success @@ -21,7 +21,7 @@ def test_execution_tractometry(script_runner): os.chdir(os.path.expanduser(tmp_dir.name)) in_json = os.path.join(get_home(), 'tractometry', 'length_stats_1.json') - ret = script_runner.run('scil_convert_json_to_xlsx.py', in_json, + ret = script_runner.run('scil_json_convert_entries_to_xlsx.py', in_json, 'length_stats.xlsx') assert ret.success diff --git a/scripts/tests/test_json_harmonize_entries.py b/scripts/tests/test_json_harmonize_entries.py new file mode 100644 index 0000000000..42be741186 --- /dev/null +++ b/scripts/tests/test_json_harmonize_entries.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +def test_help_option(script_runner): + ret = script_runner.run('scil_json_harmonize_entries.py', '--help') + assert ret.success diff --git a/scripts/tests/test_merge_json.py b/scripts/tests/test_json_merge_entries.py similarity index 85% rename from scripts/tests/test_merge_json.py rename to scripts/tests/test_json_merge_entries.py index 311a1bce51..5d10c49300 100644 --- a/scripts/tests/test_merge_json.py +++ b/scripts/tests/test_json_merge_entries.py @@ -13,7 +13,7 @@ def test_help_option(script_runner): - ret = script_runner.run('scil_merge_json.py', '--help') + ret = script_runner.run('scil_json_merge_entries.py', '--help') assert ret.success @@ -23,7 +23,7 @@ def test_execution_tractometry(script_runner): 'length_stats_1.json') in_json_2 = os.path.join(get_home(), 'tractometry', 'length_stats_2.json') - ret = script_runner.run('scil_merge_json.py', in_json_1, + ret = script_runner.run('scil_json_merge_entries.py', in_json_1, in_json_2, 'merge.json', '--keep_separate') assert ret.success From 0285fdd0e429502f434574068d206c7d780cff86 Mon Sep 17 00:00:00 2001 From: CHrlS98 Date: Thu, 14 Dec 2023 12:05:10 -0500 Subject: [PATCH 10/63] PEP8 --- scilpy/denoise/tests/test_asym_filtering.py | 2 +- scripts/scil_sh_to_aodf.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scilpy/denoise/tests/test_asym_filtering.py b/scilpy/denoise/tests/test_asym_filtering.py index 7dd066cecf..b626a0ed80 100644 --- a/scilpy/denoise/tests/test_asym_filtering.py +++ b/scilpy/denoise/tests/test_asym_filtering.py @@ -43,4 +43,4 @@ def test_cosine_filtering(): out = cosine_filtering(in_sh, sh_order, sh_basis, full_basis, sharpness, sphere_str, sigma_spatial) - assert np.allclose(out, fodf_3x3_order8_descoteaux07_filtered_cosine) \ No newline at end of file + assert np.allclose(out, fodf_3x3_order8_descoteaux07_filtered_cosine) diff --git a/scripts/scil_sh_to_aodf.py b/scripts/scil_sh_to_aodf.py index 3bedb78851..ffda00ddf1 100644 --- a/scripts/scil_sh_to_aodf.py +++ b/scripts/scil_sh_to_aodf.py @@ -31,7 +31,7 @@ angle_aware_bilateral_filtering) -EPILOG=""" +EPILOG = """ [1] Poirier et al, 2022, "Intuitive Angle-Aware Bilateral Filtering Revealing Asymmetric Fiber ODF for Improved Tractography", ISMRM 2022 (abstract 3552) From 
2f340d9b75a8c4cefb87a329375a36ec515506ae Mon Sep 17 00:00:00 2001 From: AntoineTheb Date: Thu, 14 Dec 2023 13:03:26 -0500 Subject: [PATCH 11/63] ENH: remove old tests --- .../test_gradients_convert_fsl_to_mrtrix.py | 37 ------------------- .../test_gradients_convert_mrtrix_to_fsl.py | 26 ------------- 2 files changed, 63 deletions(-) delete mode 100644 scripts/tests/test_gradients_convert_fsl_to_mrtrix.py delete mode 100644 scripts/tests/test_gradients_convert_mrtrix_to_fsl.py diff --git a/scripts/tests/test_gradients_convert_fsl_to_mrtrix.py b/scripts/tests/test_gradients_convert_fsl_to_mrtrix.py deleted file mode 100644 index 6278bc31a8..0000000000 --- a/scripts/tests/test_gradients_convert_fsl_to_mrtrix.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import os -import tempfile - -from scilpy.io.fetcher import fetch_data, get_home, get_testing_files_dict - -# If they already exist, this only takes 5 seconds (check md5sum) -fetch_data(get_testing_files_dict(), keys=['processing.zip']) -tmp_dir = tempfile.TemporaryDirectory() - - -def test_help_option(script_runner): - ret = script_runner.run('scil_gradients_convert_fsl_to_mrtrix.py', - '--help') - assert ret.success - - -def test_execution_processing(script_runner): - os.chdir(os.path.expanduser(tmp_dir.name)) - in_bval = os.path.join(get_home(), 'processing', - '1000.bval') - in_bvec = os.path.join(get_home(), 'processing', - '1000.bvec') - ret = script_runner.run('scil_gradients_convert_fsl_to_mrtrix.py', - in_bval, in_bvec, '1000.b') - assert ret.success - - -def test_execution_processing(script_runner): - os.chdir(os.path.expanduser(tmp_dir.name)) - in_encoding = os.path.join(get_home(), 'processing', - '1000.b') - ret = script_runner.run('scil_gradients_convert_mrtrix_to_fsl.py', - in_encoding, '1000.bval', '1000.bvec') - assert ret.success diff --git a/scripts/tests/test_gradients_convert_mrtrix_to_fsl.py b/scripts/tests/test_gradients_convert_mrtrix_to_fsl.py deleted file mode 100644 index 514b6346ca..0000000000 --- a/scripts/tests/test_gradients_convert_mrtrix_to_fsl.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import os -import tempfile - -from scilpy.io.fetcher import fetch_data, get_home, get_testing_files_dict - -# If they already exist, this only takes 5 seconds (check md5sum) -fetch_data(get_testing_files_dict(), keys=['processing.zip']) -tmp_dir = tempfile.TemporaryDirectory() - - -def test_help_option(script_runner): - ret = script_runner.run('scil_gradients_convert_mrtrix_to_fsl.py', - '--help') - assert ret.success - - -def test_execution_processing(script_runner): - os.chdir(os.path.expanduser(tmp_dir.name)) - in_encoding = os.path.join(get_home(), 'processing', - '1000.b') - ret = script_runner.run('scil_gradients_convert_mrtrix_to_fsl.py', - in_encoding, '1000.bval', '1000.bvec') - assert ret.success From 64c41c9a4cdc5a76c4d4a6852f73dfe796263bcd Mon Sep 17 00:00:00 2001 From: CHrlS98 Date: Thu, 14 Dec 2023 13:12:31 -0500 Subject: [PATCH 12/63] script renaming part 1 --- scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py | 2 +- ...df_lobe_specific_metrics.py => scil_fodf_bingham_metrics.py} | 0 ...df_lobe_specific_metrics.py => test_fodf_bingham_metrics.py} | 0 3 files changed, 1 insertion(+), 1 deletion(-) rename scripts/{scil_fodf_lobe_specific_metrics.py => scil_fodf_bingham_metrics.py} (100%) rename scripts/tests/{test_fodf_lobe_specific_metrics.py => test_fodf_bingham_metrics.py} (100%) diff --git 
a/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py b/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py index ee06b64a7e..20ca2ba831 100755 --- a/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py +++ b/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- from scilpy.io.deprecator import deprecate_script -from scripts.scil_fodf_lobe_specific_metrics import main as new_main +from scripts.scil_fodf_bingham_metrics import main as new_main DEPRECATION_MSG = """ diff --git a/scripts/scil_fodf_lobe_specific_metrics.py b/scripts/scil_fodf_bingham_metrics.py similarity index 100% rename from scripts/scil_fodf_lobe_specific_metrics.py rename to scripts/scil_fodf_bingham_metrics.py diff --git a/scripts/tests/test_fodf_lobe_specific_metrics.py b/scripts/tests/test_fodf_bingham_metrics.py similarity index 100% rename from scripts/tests/test_fodf_lobe_specific_metrics.py rename to scripts/tests/test_fodf_bingham_metrics.py From 70ca47084b512f0f6c514a2c3ff64776f6a23f60 Mon Sep 17 00:00:00 2001 From: CHrlS98 Date: Thu, 14 Dec 2023 13:18:58 -0500 Subject: [PATCH 13/63] Move scripts to legacy --- ...execute_angle_aware_bilateral_filtering.py | 21 +++++++++++++++++++ .../scil_execute_asymmetric_filtering.py | 21 +++++++++++++++++++ 2 files changed, 42 insertions(+) create mode 100755 scripts/legacy/scil_execute_angle_aware_bilateral_filtering.py create mode 100755 scripts/legacy/scil_execute_asymmetric_filtering.py diff --git a/scripts/legacy/scil_execute_angle_aware_bilateral_filtering.py b/scripts/legacy/scil_execute_angle_aware_bilateral_filtering.py new file mode 100755 index 0000000000..37516d3041 --- /dev/null +++ b/scripts/legacy/scil_execute_angle_aware_bilateral_filtering.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_sh_to_aodf import main as new_main + + +DEPRECATION_MSG = """ +This script has been merged with scil_execute_asymmetric_filtering.py +into scil_sh_to_aodf.py Please change your existing pipelines accordingly. +""" + + +@deprecate_script("scil_execute_angle_aware_bilateral_filtering.py", + DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() diff --git a/scripts/legacy/scil_execute_asymmetric_filtering.py b/scripts/legacy/scil_execute_asymmetric_filtering.py new file mode 100755 index 0000000000..eba6c576a7 --- /dev/null +++ b/scripts/legacy/scil_execute_asymmetric_filtering.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_sh_to_aodf import main as new_main + + +DEPRECATION_MSG = """ +This script has been merged with scil_execute_angle_aware_bilateral_filtering.py +into scil_sh_to_aodf.py Please change your existing pipelines accordingly. 
+""" + + +@deprecate_script("scil_execute_asymmetric_filtering.py", + DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() From 34a319fde3a65ef0596e82e469824150ee872fbb Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 14 Dec 2023 13:20:47 -0500 Subject: [PATCH 14/63] validate helps with new names, add formally + pep8 --- scripts/legacy/scil_compute_bundle_volume.py | 2 +- .../scil_compute_fodf_max_in_ventricles.py | 3 +- ...scil_compute_lobe_specific_fodf_metrics.py | 3 +- .../legacy/scil_remove_invalid_streamlines.py | 3 +- scripts/legacy/scil_resample_streamlines.py | 4 +- .../scil_uniformize_streamlines_endpoints.py | 4 +- scripts/scil_NODDI_maps.py | 2 + scripts/scil_NODDI_priors.py | 10 ++-- scripts/scil_aodf_metrics.py | 2 + scripts/scil_apply_transform_to_hdf5.py | 48 ++++++++++--------- .../scil_assign_custom_color_to_tractogram.py | 5 +- scripts/scil_btensor_metrics.py | 2 + scripts/scil_bundle_compute_centroid.py | 2 + scripts/scil_bundle_compute_endpoints_map.py | 4 +- scripts/scil_bundle_diameter.py | 13 +++-- scripts/scil_bundle_filter_by_occurence.py | 2 + scripts/scil_bundle_generate_priors.py | 2 + scripts/scil_bundle_label_map.py | 10 ++-- scripts/scil_bundle_mean_fixel_afd.py | 15 +++--- .../scil_bundle_mean_fixel_afd_from_hdf5.py | 8 ++-- scripts/scil_bundle_mean_fixel_lobe_metric.py | 4 +- scripts/scil_bundle_mean_std.py | 5 +- scripts/scil_bundle_pairwise_comparison.py | 16 +++++-- ...undle_score_many_bundles_one_tractogram.py | 2 + ...le_score_same_bundle_many_segmentations.py | 26 ++++++---- scripts/scil_bundle_shape_measures.py | 5 +- scripts/scil_bundle_volume_per_label.py | 4 +- scripts/scil_clean_qbx_clusters.py | 5 +- .../scil_connectivity_compare_populations.py | 6 ++- scripts/scil_connectivity_compute_matrices.py | 26 ++++++---- scripts/scil_connectivity_filter.py | 2 + scripts/scil_connectivity_graph_measures.py | 8 ++-- scripts/scil_connectivity_normalize.py | 2 + .../scil_connectivity_pairwise_agreement.py | 2 + scripts/scil_connectivity_print_filenames.py | 2 + scripts/scil_connectivity_reorder_rois.py | 2 + scripts/scil_dki_metrics.py | 5 +- scripts/scil_dti_metrics.py | 2 + scripts/scil_dwi_apply_bias_field.py | 2 + scripts/scil_dwi_compute_snr.py | 1 + scripts/scil_dwi_concatenate.py | 2 + scripts/scil_dwi_extract_b0.py | 2 + scripts/scil_dwi_extract_shell.py | 1 + scripts/scil_dwi_powder_average.py | 2 + scripts/scil_dwi_reorder_philips.py | 5 +- scripts/scil_dwi_split_by_indices.py | 1 + scripts/scil_dwi_to_sh.py | 2 + scripts/scil_fodf_lobe_specific_metrics.py | 2 + scripts/scil_fodf_max_in_ventricles.py | 2 + scripts/scil_fodf_memsmt.py | 6 ++- scripts/scil_fodf_metrics.py | 27 ++++++----- scripts/scil_fodf_msmt.py | 2 + scripts/scil_fodf_ssst.py | 2 + scripts/scil_fodf_to_bingham.py | 2 + scripts/scil_freewater_maps.py | 2 + scripts/scil_frf_mean.py | 2 + scripts/scil_frf_memsmt.py | 2 + scripts/scil_frf_msmt.py | 2 + scripts/scil_frf_set_diffusivities.py | 5 +- scripts/scil_frf_ssst.py | 2 + scripts/scil_gradients_apply_transform.py | 2 + .../scil_gradients_convert_fsl_to_mrtrix.py | 2 + .../scil_gradients_convert_mrtrix_to_fsl.py | 2 + scripts/scil_gradients_generate_sampling.py | 8 ++-- scripts/scil_gradients_modify_axes.py | 2 + scripts/scil_gradients_round_bvals.py | 2 + scripts/scil_gradients_validate_correct.py | 2 + .../scil_gradients_validate_correct_eddy.py | 2 + scripts/scil_labels_combine.py | 2 + scripts/scil_labels_dilate.py | 2 + 
scripts/scil_labels_remove.py | 2 + scripts/scil_labels_split_volume_by_ids.py | 2 + scripts/scil_labels_split_volume_from_lut.py | 2 + scripts/scil_mti_maps_MT.py | 1 + scripts/scil_mti_maps_ihMT.py | 1 + scripts/scil_plot_mean_std_per_point.py | 8 ++-- scripts/scil_qball_metrics.py | 5 +- scripts/scil_rgb_convert.py | 5 +- scripts/scil_sh_convert.py | 14 +++--- scripts/scil_sh_fusion.py | 2 + scripts/scil_sh_to_rish.py | 2 + scripts/scil_sh_to_sf.py | 8 +++- scripts/scil_surface_apply_transform.py | 2 + scripts/scil_surface_convert.py | 2 + scripts/scil_surface_flip.py | 2 + scripts/scil_surface_smooth.py | 2 + scripts/scil_tracking_local.py | 2 + scripts/scil_tracking_local_dev.py | 4 +- scripts/scil_tracking_pft.py | 35 ++++++++------ scripts/scil_tracking_pft_maps.py | 2 + scripts/scil_tracking_pft_maps_edit.py | 2 + scripts/scil_tractogram_apply_transform.py | 4 +- scripts/scil_tractogram_commit.py | 2 + scripts/scil_tractogram_compress.py | 2 + scripts/scil_tractogram_convert.py | 2 + scripts/scil_tractogram_count_streamlines.py | 2 + scripts/scil_tractogram_cut_streamlines.py | 2 + scripts/scil_tractogram_detect_loops.py | 2 + scripts/scil_tractogram_extract_ushape.py | 4 +- scripts/scil_tractogram_filter_by_anatomy.py | 6 ++- scripts/scil_tractogram_filter_by_length.py | 5 +- .../scil_tractogram_filter_by_orientation.py | 2 + scripts/scil_tractogram_filter_by_roi.py | 39 ++++++++++----- scripts/scil_tractogram_fix_trk.py | 2 + scripts/scil_tractogram_flip.py | 2 + scripts/scil_tractogram_math.py | 2 + scripts/scil_tractogram_qbx.py | 6 ++- scripts/scil_tractogram_register.py | 4 +- scripts/scil_tractogram_remove_invalid.py | 5 +- scripts/scil_tractogram_resample.py | 2 + scripts/scil_tractogram_resample_nb_points.py | 2 + scripts/scil_tractogram_segment_bundles.py | 14 ++++-- .../scil_tractogram_segment_one_bundles.py | 2 + scripts/scil_tractogram_shuffle.py | 2 + scripts/scil_tractogram_smooth.py | 11 +++-- scripts/scil_tractogram_split.py | 2 + .../scil_tractogram_uniformize_endpoints.py | 11 +++-- scripts/scil_visualize_seeds.py | 4 +- scripts/scil_volume_apply_transform.py | 2 + scripts/scil_volume_count_non_zero_voxels.py | 2 + scripts/scil_volume_crop.py | 2 + scripts/scil_volume_flip.py | 2 + scripts/scil_volume_math.py | 4 +- scripts/scil_volume_remove_outliers_ransac.py | 2 + scripts/scil_volume_resample.py | 2 + scripts/scil_volume_reshape_to_reference.py | 5 +- 126 files changed, 463 insertions(+), 176 deletions(-) diff --git a/scripts/legacy/scil_compute_bundle_volume.py b/scripts/legacy/scil_compute_bundle_volume.py index f54bfab18d..7ec5c7f375 100644 --- a/scripts/legacy/scil_compute_bundle_volume.py +++ b/scripts/legacy/scil_compute_bundle_volume.py @@ -12,7 +12,7 @@ """ -@deprecate_script("scil_compute_bundle_volume.pu", DEPRECATION_MSG, '1.7.0') +@deprecate_script("scil_compute_bundle_volume.py", DEPRECATION_MSG, '1.7.0') def main(): new_main() diff --git a/scripts/legacy/scil_compute_fodf_max_in_ventricles.py b/scripts/legacy/scil_compute_fodf_max_in_ventricles.py index 655180601f..8534f2ea18 100755 --- a/scripts/legacy/scil_compute_fodf_max_in_ventricles.py +++ b/scripts/legacy/scil_compute_fodf_max_in_ventricles.py @@ -11,7 +11,8 @@ """ -@deprecate_script("scil_compute_fodf_max_in_ventricles.py", DEPRECATION_MSG, '1.7.0') +@deprecate_script("scil_compute_fodf_max_in_ventricles.py", DEPRECATION_MSG, + '1.7.0') def main(): new_main() diff --git a/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py b/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py 
index ee06b64a7e..35bb1c97ce 100755 --- a/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py +++ b/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py @@ -11,7 +11,8 @@ """ -@deprecate_script("scil_compute_lobe_specific_fodf_metrics.py", DEPRECATION_MSG, '1.7.0') +@deprecate_script("scil_compute_lobe_specific_fodf_metrics.py", + DEPRECATION_MSG, '1.7.0') def main(): new_main() diff --git a/scripts/legacy/scil_remove_invalid_streamlines.py b/scripts/legacy/scil_remove_invalid_streamlines.py index 367cdabe6e..04c2ca2111 100755 --- a/scripts/legacy/scil_remove_invalid_streamlines.py +++ b/scripts/legacy/scil_remove_invalid_streamlines.py @@ -12,7 +12,8 @@ """ -@deprecate_script("scil_remove_invalid_streamlines.py", DEPRECATION_MSG, '1.7.0') +@deprecate_script("scil_remove_invalid_streamlines.py", DEPRECATION_MSG, + '1.7.0') def main(): new_main() diff --git a/scripts/legacy/scil_resample_streamlines.py b/scripts/legacy/scil_resample_streamlines.py index ea261e4554..d900733358 100755 --- a/scripts/legacy/scil_resample_streamlines.py +++ b/scripts/legacy/scil_resample_streamlines.py @@ -6,8 +6,8 @@ DEPRECATION_MSG = """ -This script has been renamed scil_tractogram_resample_nb_points.py. Please change -your existing pipelines accordingly. +This script has been renamed scil_tractogram_resample_nb_points.py. Please +change your existing pipelines accordingly. """ diff --git a/scripts/legacy/scil_uniformize_streamlines_endpoints.py b/scripts/legacy/scil_uniformize_streamlines_endpoints.py index 8d228a2b4b..5fcab086fa 100755 --- a/scripts/legacy/scil_uniformize_streamlines_endpoints.py +++ b/scripts/legacy/scil_uniformize_streamlines_endpoints.py @@ -6,8 +6,8 @@ DEPRECATION_MSG = """ -This script has been renamed scil_tractogram_uniformize_endpoints.py. Please change -your existing pipelines accordingly. +This script has been renamed scil_tractogram_uniformize_endpoints.py. Please +change your existing pipelines accordingly. """ diff --git a/scripts/scil_NODDI_maps.py b/scripts/scil_NODDI_maps.py index 3c4999123b..458bcc6e91 100755 --- a/scripts/scil_NODDI_maps.py +++ b/scripts/scil_NODDI_maps.py @@ -4,6 +4,8 @@ """ Compute NODDI [1] maps using AMICO. Multi-shell DWI necessary. + +Formally: scil_compute_NODDI.py """ import argparse diff --git a/scripts/scil_NODDI_priors.py b/scripts/scil_NODDI_priors.py index 02f4553761..8297ae36fc 100755 --- a/scripts/scil_NODDI_priors.py +++ b/scripts/scil_NODDI_priors.py @@ -3,6 +3,8 @@ """ Compute the axial (para_diff) and mean (iso_diff) diffusivity priors for NODDI. + +Formally: scil_compute_NODDI_priors.py """ import argparse @@ -66,15 +68,15 @@ def _build_arg_parser(): g3 = p.add_argument_group('Outputs') g3.add_argument('--out_txt_1fiber', metavar='FILE', help='Output path for the text file containing the single ' - 'fiber average value of AD.\nIf not set, the file will not ' - 'be saved.') + 'fiber average value of AD.\nIf not set, the file ' + 'will not be saved.') g3.add_argument('--out_mask_1fiber', metavar='FILE', help='Output path for single fiber mask. 
If not set, the ' 'mask will not be saved.') g3.add_argument('--out_txt_ventricles', metavar='FILE', help='Output path for the text file containing the ' - 'ventricles average value of MD.\nIf not set, the file ' - 'will not be saved.') + 'ventricles average value of MD.\nIf not set, the ' + 'file will not be saved.') g3.add_argument('--out_mask_ventricles', metavar='FILE', help='Output path for the ventricule mask.\nIf not set, ' 'the mask will not be saved.') diff --git a/scripts/scil_aodf_metrics.py b/scripts/scil_aodf_metrics.py index f1108f6287..c130809eac 100755 --- a/scripts/scil_aodf_metrics.py +++ b/scripts/scil_aodf_metrics.py @@ -22,6 +22,8 @@ perfectly symmetric signal and 1 to a perfectly anti-symmetric signal. It is given as the ratio of the L2-norm of odd SH coefficients on the L2-norm of all SH coefficients. + +Formally: scil_compute_asym_odf_metrics.py """ diff --git a/scripts/scil_apply_transform_to_hdf5.py b/scripts/scil_apply_transform_to_hdf5.py index 1d57652c89..4fb059cb3b 100755 --- a/scripts/scil_apply_transform_to_hdf5.py +++ b/scripts/scil_apply_transform_to_hdf5.py @@ -9,16 +9,16 @@ https://scilpy.readthedocs.io/en/latest/documentation/tractogram_registration.html Example: -To apply transform from ANTS to tractogram. If the ANTS commands was -MOVING->REFERENCE, this will bring a tractogram from MOVING->REFERENCE -scil_apply_transform_to_tractogram.py ${MOVING_FILE} ${REFERENCE_FILE} +To apply transform from ANTS to hdf5 (.h5). If the ANTS commands was +MOVING->REFERENCE, this will bring a hdf5 (.h5) from MOVING->REFERENCE +scil_apply_transform_to_hdf5.py ${MOVING_FILE} ${REFERENCE_FILE} 0GenericAffine.mat ${OUTPUT_NAME} --inverse --in_deformation 1InverseWarp.nii.gz -If the ANTS commands was MOVING->REFERENCE, this will bring a tractogram +If the ANTS commands was MOVING->REFERENCE, this will bring a hdf5 from REFERENCE->MOVING -scil_apply_transform_to_tractogram.py ${MOVING_FILE} ${REFERENCE_FILE} +scil_apply_transform_to_hdf5.py ${MOVING_FILE} ${REFERENCE_FILE} 0GenericAffine.mat ${OUTPUT_NAME} --in_deformation 1Warp.nii.gz --reverse_operation @@ -47,14 +47,14 @@ def _build_arg_parser(): description=__doc__) p.add_argument('in_hdf5', - help='Path of the tractogram to be transformed.') + help='Path of the hdf5 (.h5) to be transformed.') p.add_argument('in_target_file', - help='Path of the reference target file (.trk or .nii).') + help='Path of the reference target file (.nii).') p.add_argument('in_transfo', help='Path of the file containing the 4x4 \n' 'transformation, matrix (.txt, .npy or .mat).') p.add_argument('out_hdf5', - help='Output tractogram filename (transformed data).') + help='Output hdf5 (.h5) filename (transformed data).') p.add_argument('--inverse', action='store_true', help='Apply the inverse linear transformation.') @@ -117,25 +117,27 @@ def main(): moving_sft.data_per_streamline[dps_key] \ = in_hdf5_file[key][dps_key] - new_sft = transform_warp_sft(moving_sft, transfo, target_img, - inverse=args.inverse, - deformation_data=deformation_data, - reverse_op=args.reverse_operation, - remove_invalid=not args.cut_invalid, - cut_invalid=args.cut_invalid) + new_sft = transform_warp_sft( + moving_sft, transfo, target_img, + inverse=args.inverse, + deformation_data=deformation_data, + reverse_op=args.reverse_operation, + remove_invalid=not args.cut_invalid, + cut_invalid=args.cut_invalid) new_sft.to_vox() new_sft.to_corner() - affine, dimensions, voxel_sizes, voxel_order = get_reference_info( - target_img) + affine, dimensions, voxel_sizes, voxel_order 
=\ + get_reference_info(target_img) out_hdf5_file.attrs['affine'] = affine out_hdf5_file.attrs['dimensions'] = dimensions out_hdf5_file.attrs['voxel_sizes'] = voxel_sizes out_hdf5_file.attrs['voxel_order'] = voxel_order group = out_hdf5_file[key] - group.create_dataset('data', - data=new_sft.streamlines._data.astype(np.float32)) + group.create_dataset( + 'data', + data=new_sft.streamlines._data.astype(np.float32)) group.create_dataset('offsets', data=new_sft.streamlines._offsets) group.create_dataset('lengths', @@ -144,11 +146,13 @@ def main(): if dps_key not in ['data', 'offsets', 'lengths']: if in_hdf5_file[key][dps_key].shape \ == in_hdf5_file[key]['offsets']: - group.create_dataset(dps_key, - data=new_sft.data_per_streamline[dps_key]) + group.create_dataset( + dps_key, + data=new_sft.data_per_streamline[dps_key]) else: - group.create_dataset(dps_key, - data=in_hdf5_file[key][dps_key]) + group.create_dataset( + dps_key, + data=in_hdf5_file[key][dps_key]) if __name__ == "__main__": diff --git a/scripts/scil_assign_custom_color_to_tractogram.py b/scripts/scil_assign_custom_color_to_tractogram.py index 3c253a1d97..a7e6d17b29 100755 --- a/scripts/scil_assign_custom_color_to_tractogram.py +++ b/scripts/scil_assign_custom_color_to_tractogram.py @@ -116,7 +116,8 @@ def _build_arg_parser(): g2.add_argument('--max_cmap', type=float, help='Set the maximum value of the colormap.') g2.add_argument('--log', action='store_true', - help='Apply a base 10 logarithm for colored trk (dps/dpp).') + help='Apply a base 10 logarithm for colored trk (dps/dpp).' + ) g2.add_argument('--LUT', metavar='FILE', help='If the dps/dpp or anatomy contain integer labels, ' 'the value will be substituted.\nIf the LUT has 20 ' @@ -182,7 +183,7 @@ def main(): if np.any(sft.streamlines._lengths < len(LUT)): logging.warning('Some streamlines have fewer point than the size ' 'of the provided LUT.\nConsider using ' - 'scil_resample_streamlines.py') + 'scil_tractogram_resample_nb_points.py') cmap = get_colormap(args.colormap) if args.use_dps or args.use_dpp or args.load_dps or args.load_dpp: diff --git a/scripts/scil_btensor_metrics.py b/scripts/scil_btensor_metrics.py index bb037dbeb4..544ac890cb 100755 --- a/scripts/scil_btensor_metrics.py +++ b/scripts/scil_btensor_metrics.py @@ -28,6 +28,8 @@ and Daniel Topgaard. An open-source framework for analysis of multidimensional diffusion MRI data implemented in MATLAB. Proc. Intl. Soc. Mag. Reson. Med. (26), Paris, France, 2018. + +Formally: scil_compute_divide.py """ import argparse diff --git a/scripts/scil_bundle_compute_centroid.py b/scripts/scil_bundle_compute_centroid.py index fd5dc91baa..da960853f6 100755 --- a/scripts/scil_bundle_compute_centroid.py +++ b/scripts/scil_bundle_compute_centroid.py @@ -3,6 +3,8 @@ """ Compute a single bundle centroid, using an 'infinite' QuickBundles threshold. + +Formally: scil_compute_centroid.py """ import argparse diff --git a/scripts/scil_bundle_compute_endpoints_map.py b/scripts/scil_bundle_compute_endpoints_map.py index c35c693ee3..a17b6f30a7 100755 --- a/scripts/scil_bundle_compute_endpoints_map.py +++ b/scripts/scil_bundle_compute_endpoints_map.py @@ -10,7 +10,9 @@ Note: If the streamlines are not ordered the head/tail are random and not really two coherent groups. 
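As background for the scil_apply_transform_to_hdf5.py hunks above: each connection is stored as an hdf5 group holding a flat 'data' array of points plus 'offsets'/'lengths' arrays describing where each streamline starts and how many points it has. A minimal sketch of reading one group back into a list of streamlines (the file name and connection key are hypothetical):

import h5py
import numpy as np

with h5py.File('decomposed.h5', 'r') as f:
    group = f['1_2']                        # one connection, e.g. labels 1-2
    data = np.asarray(group['data'])        # (total_points, 3), float32
    offsets = np.asarray(group['offsets'])  # start index of each streamline
    lengths = np.asarray(group['lengths'])  # point count per streamline
    streamlines = [data[o:o + n] for o, n in zip(offsets, lengths)]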
Use the following script to order streamlines: -scil_uniformize_streamlines_endpoints.py +scil_tractogram_uniformize_endpoints.py + +Formally: scil_compute_endpoints_map.py """ import argparse diff --git a/scripts/scil_bundle_diameter.py b/scripts/scil_bundle_diameter.py index 2524aea716..9e835bff58 100755 --- a/scripts/scil_bundle_diameter.py +++ b/scripts/scil_bundle_diameter.py @@ -8,8 +8,8 @@ for each section of the bundle. The script expects: -- bundles with coherent endpoints from scil_uniformize_streamlines_endpoints.py -- labels maps with around 5-50 points scil_compute_bundle_voxel_label_map.py +- bundles with coherent endpoints from scil_tractogram_uniformize_endpoints.py +- labels maps with around 5-50 points scil_bundle_label_map.py <5 is not enough, high risk of bad fit >50 is too much, high risk of bad fit - bundles that are close to a tube @@ -23,6 +23,8 @@ and the tube with varying diameter is not easy to color/visualize, the script comes with its own VTK rendering to allow exploration of the data. (optional). + +Formally: scil_estimate_bundles_diameter.py """ import argparse @@ -282,9 +284,10 @@ def main(): stats[bundle_name] = {'diameter': tmp_dict} if args.show_rendering or args.save_rendering: - tube_actor = create_tube_with_radii(centroid_smooth, radius, error, - wireframe=args.wireframe, - error_coloring=args.error_coloring) + tube_actor = create_tube_with_radii( + centroid_smooth, radius, error, + wireframe=args.wireframe, + error_coloring=args.error_coloring) scene.add(tube_actor) cmap = get_colormap('jet') coloring = cmap(pts_labels / np.max(pts_labels))[:, 0:3] diff --git a/scripts/scil_bundle_filter_by_occurence.py b/scripts/scil_bundle_filter_by_occurence.py index 9e7afbbb42..34077e51e0 100755 --- a/scripts/scil_bundle_filter_by_occurence.py +++ b/scripts/scil_bundle_filter_by_occurence.py @@ -9,6 +9,8 @@ If streamlines originate from the same tractogram (ex, to compare various bundle clustering techniques), streamline-wise vote is available to find the streamlines most often included in the bundle. + +Formally: scil_perform_majority_vote.py """ diff --git a/scripts/scil_bundle_generate_priors.py b/scripts/scil_bundle_generate_priors.py index c81f65eaf3..348c28d40d 100755 --- a/scripts/scil_bundle_generate_priors.py +++ b/scripts/scil_bundle_generate_priors.py @@ -5,6 +5,8 @@ Generation of priors and enhanced-FOD from an example/template bundle. The bundle must have been cleaned thorougly before use. The E-FOD can then be used for bundle-specific tractography, but not for FOD metrics. + +Formally: scil_generate_priors_from_bundle.py """ import argparse diff --git a/scripts/scil_bundle_label_map.py b/scripts/scil_bundle_label_map.py index cf7c6ddab5..1c1d1449cc 100755 --- a/scripts/scil_bundle_label_map.py +++ b/scripts/scil_bundle_label_map.py @@ -10,6 +10,8 @@ Each voxel will have the label of its nearest centroid point. The number of labels will be the same as the centroid's number of points. 
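The nearest-centroid labelling described above is essentially a 1-nearest-neighbour query. A small illustrative sketch with dummy data (the scilpy implementation additionally handles masking and centroid alignment):

import numpy as np
from scipy.spatial import cKDTree


def label_voxels_by_centroid(voxel_coords, centroid_points):
    # Each voxel gets the (1-based) index of the closest centroid point;
    # 0 is kept for background.
    _, nearest = cKDTree(centroid_points).query(voxel_coords)
    return nearest + 1


voxels = np.random.rand(100, 3) * 10                       # dummy positions
centroid = np.linspace([0., 0., 0.], [10., 10., 10.], 20)  # 20-point centroid
labels = label_voxels_by_centroid(voxels, centroid)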
+ +Formally: scil_compute_bundle_voxel_label_map.py """ import argparse @@ -17,7 +19,8 @@ from dipy.align.streamlinear import StreamlineLinearRegistration from dipy.io.streamline import save_tractogram -from dipy.io.stateful_tractogram import StatefulTractogram, set_sft_logger_level +from dipy.io.stateful_tractogram import (StatefulTractogram, + set_sft_logger_level) from dipy.io.utils import is_header_compatible import matplotlib.pyplot as plt import nibabel as nib @@ -36,7 +39,8 @@ from scilpy.tractanalysis.distance_to_centroid import min_dist_to_centroid from scilpy.tractograms.streamline_and_mask_operations import \ cut_outside_of_mask_streamlines -from scilpy.tractograms.streamline_operations import resample_streamlines_num_points +from scilpy.tractograms.streamline_operations import \ + resample_streamlines_num_points from scilpy.utils.streamlines import uniformize_bundle_sft from scilpy.viz.utils import get_colormap @@ -160,7 +164,7 @@ def main(): else: srr = StreamlineLinearRegistration() srm = srr.optimize(static=tmp_sft.streamlines, - moving=sft_centroid.streamlines) + moving=sft_centroid.streamlines) sft_centroid.streamlines = srm.transform(sft_centroid.streamlines) uniformize_bundle_sft(concat_sft, ref_bundle=sft_centroid[0]) diff --git a/scripts/scil_bundle_mean_fixel_afd.py b/scripts/scil_bundle_mean_fixel_afd.py index 193c7a6623..7cd4a9d0ab 100755 --- a/scripts/scil_bundle_mean_fixel_afd.py +++ b/scripts/scil_bundle_mean_fixel_afd.py @@ -9,6 +9,8 @@ of the bundle provided, averaged at every voxel. Please use a bundle file rather than a whole tractogram. + +Formally: scil_compute_fixel_afd_from_bundles.py """ import argparse @@ -28,7 +30,8 @@ [1] Raffelt, D., Tournier, JD., Rose, S., Ridgway, GR., Henderson, R., Crozier, S., Salvado, O., & Connelly, A. (2012). Apparent Fibre Density: a novel measure for the analysis of - diffusion-weighted magnetic resonance images. NeuroImage, 59(4), 3976--3994. + diffusion-weighted magnetic resonance images. NeuroImage, 59(4), + 3976--3994. """ @@ -62,15 +65,15 @@ def main(): sft = load_tractogram_with_reference(parser, args, args.in_bundle) fodf_img = nib.load(args.in_fodf) - afd_mean_map, rd_mean_map = afd_map_along_streamlines(sft, - fodf_img, - args.sh_basis, - args.length_weighting) + afd_mean_map, rd_mean_map = afd_map_along_streamlines( + sft, + fodf_img, + args.sh_basis, + args.length_weighting) nib.Nifti1Image(afd_mean_map.astype(np.float32), fodf_img.affine).to_filename(args.afd_mean_map) - if __name__ == '__main__': main() diff --git a/scripts/scil_bundle_mean_fixel_afd_from_hdf5.py b/scripts/scil_bundle_mean_fixel_afd_from_hdf5.py index 16db8a6138..bd0c928484 100755 --- a/scripts/scil_bundle_mean_fixel_afd_from_hdf5.py +++ b/scripts/scil_bundle_mean_fixel_afd_from_hdf5.py @@ -3,12 +3,14 @@ """ Compute the mean Apparent Fiber Density (AFD) and mean Radial fODF (radfODF) -maps along a bundle. +maps for every connections within a hdf5 (.h5) file. This is the "real" fixel-based fODF amplitude along every streamline -of the bundle provided, averaged at every voxel. +of each connection, averaged at every voxel. -Please use a bundle file rather than a whole tractogram. 
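For intuition about the fixel-AFD described above: the value is the fODF amplitude sampled along the direction of the current streamline segment. A rough sketch of one such lookup, assuming order-8 symmetric SH in the descoteaux07 basis (the real weighting and averaging logic lives in scilpy.tractanalysis):

import numpy as np
from dipy.data import get_sphere
from dipy.reconst.shm import sh_to_sf_matrix

sphere = get_sphere('repulsion724')
B, _ = sh_to_sf_matrix(sphere, 8, 'descoteaux07')   # sf = sh @ B


def segment_afd(sh_coeffs, direction):
    # Amplitude at the sphere vertex best aligned with the unit-norm
    # segment direction; abs() because symmetric fODFs are antipodal.
    idx = np.argmax(np.abs(sphere.vertices @ direction))
    return float((sh_coeffs @ B)[idx])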
+Please use a hdf5 (.h5) file containing decomposed connections. + +Formerly: scil_compute_fixel_afd_from_hdf5.py """ import argparse diff --git a/scripts/scil_bundle_mean_fixel_lobe_metric.py b/scripts/scil_bundle_mean_fixel_lobe_metric.py index caf13ee90f..8c1be29bb5 100755 --- a/scripts/scil_bundle_mean_fixel_lobe_metric.py +++ b/scripts/scil_bundle_mean_fixel_lobe_metric.py @@ -7,7 +7,7 @@ found by computing the intersection between the voxel grid and each streamline in the input tractogram. -This script behaves like scil_compute_mean_fixel_afd_from_bundles.py for fODFs, +This script behaves like scil_bundle_mean_fixel_afd.py for fODFs, but here for Bingham distributions. These latter distributions add the unique possibility to capture fixel-based fiber spread (FS) and fiber fraction (FF). FD from the bingham should be "equivalent" to the AFD_fixel we are used to. @@ -21,6 +21,8 @@ with the current streamline segment. Please use a bundle file rather than a whole tractogram. + +Formerly: scil_compute_mean_fixel_lobe_metric_from_bundles.py """ import argparse diff --git a/scripts/scil_bundle_mean_std.py b/scripts/scil_bundle_mean_std.py index 019e3b6abd..078a15cdbf 100755 --- a/scripts/scil_bundle_mean_std.py +++ b/scripts/scil_bundle_mean_std.py @@ -9,10 +9,13 @@ - Option --per_point: For all streamline points in the bundle for each metric combination, along the bundle, i.e. for each point. **To create label_map and distance_map, see - scil_compute_bundle_voxel_label_map.py + scil_bundle_label_map.py Density weighting modifies the contribution of voxels with lower/higher streamline count to reduce the influence of spurious streamlines. + +Formerly: scil_compute_bundle_mean_std_per_point.py or +scil_compute_bundle_mean_std.py """ import argparse diff --git a/scripts/scil_bundle_pairwise_comparison.py b/scripts/scil_bundle_pairwise_comparison.py index 7a01cdbfd0..0f280428c1 100755 --- a/scripts/scil_bundle_pairwise_comparison.py +++ b/scripts/scil_bundle_pairwise_comparison.py @@ -13,6 +13,8 @@ For the streamline representation, the computed similarity measures are: bundle_adjacency_streamlines, dice_streamlines, streamlines_count_overlap, streamlines_count_overreach + +Formerly: scil_evaluate_bundles_pairwise_agreement_measures.py """ import argparse @@ -94,12 +96,14 @@ def load_data_tmp_saving(args): disable_centroids = args[3] # Since data is often re-used when comparing multiple bundles, anything - # that can be computed once is saved temporarily and simply loaded on demand + # that can be computed once is saved temporarily and simply loaded on + # demand hash_tmp = hashlib.md5(filename.encode()).hexdigest() tmp_density_filename = os.path.join('tmp_measures/', '{}_density.nii.gz'.format(hash_tmp)) tmp_endpoints_filename = os.path.join('tmp_measures/', - '{}_endpoints.nii.gz'.format(hash_tmp)) + '{}_endpoints.nii.gz'.format( + hash_tmp)) tmp_centroids_filename = os.path.join('tmp_measures/', '{}_centroids.trk'.format(hash_tmp)) @@ -300,9 +304,11 @@ def main(): bundles_references_tuple_extended = link_bundles_and_reference( parser, args, bundles_list) - single_compare_reference_tuple = bundles_references_tuple_extended.pop() - comb_dict_keys = list(itertools.product(bundles_references_tuple_extended, - [single_compare_reference_tuple])) + single_compare_reference_tuple = \ + bundles_references_tuple_extended.pop() + comb_dict_keys = list(itertools.product( + bundles_references_tuple_extended, + [single_compare_reference_tuple])) else: bundles_list = args.in_bundles # Pre-compute the needed
files, to avoid conflict when the number diff --git a/scripts/scil_bundle_score_many_bundles_one_tractogram.py b/scripts/scil_bundle_score_many_bundles_one_tractogram.py index 71f32194df..6db5fa1470 100755 --- a/scripts/scil_bundle_score_many_bundles_one_tractogram.py +++ b/scripts/scil_bundle_score_many_bundles_one_tractogram.py @@ -36,6 +36,8 @@ "gt_mask": "PATH/bundle0.nii.gz", } } + +Formally: scil_score_bundles.py """ import argparse import glob diff --git a/scripts/scil_bundle_score_same_bundle_many_segmentations.py b/scripts/scil_bundle_score_same_bundle_many_segmentations.py index 6d3725b399..ad81f6e4c1 100755 --- a/scripts/scil_bundle_score_same_bundle_many_segmentations.py +++ b/scripts/scil_bundle_score_same_bundle_many_segmentations.py @@ -25,6 +25,8 @@ The computed binary classification measures are: sensitivity, specificity, precision, accuracy, dice, kappa, youden for both the streamline and voxel representation (if provided). + +Formally: scil_evaluate_bundles_binary_classification_measures.py """ import argparse @@ -63,10 +65,12 @@ def _build_arg_parser(): help='Path of the output json.') p.add_argument('--streamlines_measures', nargs=2, metavar=('GOLD_STANDARD_STREAMLINES', 'TRACTOGRAM'), - help='The gold standard bundle and the original tractogram.') + help='The gold standard bundle and the original ' + 'tractogram.') p.add_argument('--voxels_measures', nargs=2, metavar=('GOLD_STANDARD_MASK', 'TRACKING MASK'), - help='The gold standard mask and the original tracking mask.') + help='The gold standard mask and the original tracking ' + 'mask.') add_processes_arg(p) add_reference_arg(p) @@ -98,10 +102,11 @@ def compute_voxel_measures(args): binary_3d_indices = np.where(binary_3d.flatten() > 0)[0] gs_binary_3d_indices = np.where(gs_binary_3d.flatten() > 0)[0] - voxels_binary = binary_classification(binary_3d_indices, - gs_binary_3d_indices, - int(np.prod(tracking_mask.shape)), - mask_count=np.count_nonzero(tracking_mask)) + voxels_binary = binary_classification( + binary_3d_indices, + gs_binary_3d_indices, + int(np.prod(tracking_mask.shape)), + mask_count=np.count_nonzero(tracking_mask)) return dict(zip(['sensitivity_voxels', 'specificity_voxels', @@ -132,7 +137,8 @@ def compute_streamlines_measures(args): logging.info('{} is empty'.format(bundle_filename)) return None - _, streamlines_indices = intersection_robust([wb_streamlines, bundle_streamlines]) + _, streamlines_indices = intersection_robust([wb_streamlines, + bundle_streamlines]) streamlines_binary = binary_classification(streamlines_indices, gs_streamlines_indices, @@ -183,7 +189,8 @@ def main(): _, gs_dimensions, _, _ = gs_sft.space_attributes # Prepare the gold standard only once - _, gs_streamlines_indices = intersection_robust([wb_streamlines, gs_streamlines]) + _, gs_streamlines_indices = intersection_robust([wb_streamlines, + gs_streamlines]) if nbr_cpu == 1: streamlines_dict = [] @@ -212,7 +219,8 @@ def main(): else: gs_binary_3d = get_data_as_mask(nib.load(args.voxels_measures[0])) gs_binary_3d[gs_binary_3d > 0] = 1 - tracking_mask_data = get_data_as_mask(nib.load(args.voxels_measures[1])) + tracking_mask_data = get_data_as_mask(nib.load( + args.voxels_measures[1])) tracking_mask_data[tracking_mask_data > 0] = 1 if nbr_cpu == 1: diff --git a/scripts/scil_bundle_shape_measures.py b/scripts/scil_bundle_shape_measures.py index 55c0126bac..1e16a70a58 100755 --- a/scripts/scil_bundle_shape_measures.py +++ b/scripts/scil_bundle_shape_measures.py @@ -8,7 +8,7 @@ - volume_info: volume, volume_endpoints - 
streamlines_info: streamlines_count, avg_length (in mm or in number of point), average step size, min_length, max_length. - ** You may also get this information with scil_tractogram_print_info. + ** You may also get this information with scil_tractogram_print_info.py. - shape_info: span, curl, diameter, elongation, surface area, irregularity, end surface area, radius, end surface irregularity, mean_curvature, fractal dimension. @@ -30,6 +30,9 @@ The fractal dimension is dependent on the voxel size and the number of voxels. If data comparison is performed, the bundles MUST be in same resolution. + +Formerly: scil_compute_bundle_volume.py or +scil_evaluate_bundles_individual_measures.py """ import argparse diff --git a/scripts/scil_bundle_volume_per_label.py b/scripts/scil_bundle_volume_per_label.py index f13b50873a..71cece4d50 100755 --- a/scripts/scil_bundle_volume_per_label.py +++ b/scripts/scil_bundle_volume_per_label.py @@ -6,12 +6,14 @@ resolution. Volume is estimated by counting the number of voxel occupied by each label and multiplying it by the volume of a single voxel. -The labels can be obtained by scil_bundle_compute_voxel_label_map.py +The labels can be obtained by scil_bundle_label_map.py. This estimation is typically performed at resolution around 1mm3. To get the volume and other measures directly from the (whole) bundle, use scil_bundle_shape_measures.py. + +Formerly: scil_compute_bundle_volume_per_label.py """ import argparse diff --git a/scripts/scil_clean_qbx_clusters.py b/scripts/scil_clean_qbx_clusters.py index 40c410f804..5f293991cf 100755 --- a/scripts/scil_clean_qbx_clusters.py +++ b/scripts/scil_clean_qbx_clusters.py @@ -6,7 +6,7 @@ visual inspection. Useful for cleaning bundles for RBx, BST or for figures. The VTK window does not handle well opacity of streamlines, this is a normal rendering behavior. - Often use in pair with scil_compute_qbx.py. + Often used in pair with scil_tractogram_qbx.py. Key mapping: - a/A: accept displayed clusters @@ -59,7 +59,8 @@ def _build_arg_parser(): help='Opacity of the background streamlines.' 'Keep low between 0 and 0.5 [%(default)s].') p.add_argument('--background_linewidth', type=float, default=1, - help='Linewidth of the background streamlines [%(default)s].') + help='Linewidth of the background streamlines [%(default)s]' + '.') p.add_argument('--clusters_linewidth', type=float, default=1, help='Linewidth of the current cluster [%(default)s].') diff --git a/scripts/scil_connectivity_compare_populations.py b/scripts/scil_connectivity_compare_populations.py index cceefe1b65..1154f2b2cd 100755 --- a/scripts/scil_connectivity_compare_populations.py +++ b/scripts/scil_connectivity_compare_populations.py @@ -16,6 +16,8 @@ --filtering_mask will simply multiply the binary mask to all input matrices before performing the statistical comparison. Reduces the number of statistical tests, useful when using --fdr or --bonferroni. 
+ +Formerly: scil_compare_connectivity.py """ import argparse @@ -60,8 +62,8 @@ def _build_arg_parser(): 'right: mean of g2 < mean of g1,\n' 'both: both means are not equal (default).') p.add_argument('--paired', action='store_true', - help='Use paired sample t-test instead of population t-test.\n' - '--in_g1 and --in_g2 must be ordered the same way.') + help='Use paired sample t-test instead of population t-test' + '.\n--in_g1 and --in_g2 must be ordered the same way.') fwe = p.add_mutually_exclusive_group() fwe.add_argument('--fdr', action='store_true', diff --git a/scripts/scil_connectivity_compute_matrices.py b/scripts/scil_connectivity_compute_matrices.py index 27f0bd9bff..79412a2ed0 100755 --- a/scripts/scil_connectivity_compute_matrices.py +++ b/scripts/scil_connectivity_compute_matrices.py @@ -35,6 +35,8 @@ lesion(s) and the total of streamlines going through the lesion(s) for of each connection. Each connection can be seen as a 'bundle' and then something similar to scil_analyse_lesion_load.py is run for each 'bundle'. + +Formerly: scil_compute_connectivity.py """ import argparse @@ -60,7 +62,8 @@ add_verbose_arg, assert_inputs_exist, assert_outputs_exist, validate_nbr_processes) -from scilpy.tractanalysis.reproducibility_measures import compute_bundle_adjacency_voxel +from scilpy.tractanalysis.reproducibility_measures import \ compute_bundle_adjacency_voxel from scilpy.tractanalysis.streamlines_metrics import compute_tract_counts_map from scilpy.utils.metrics_tools import compute_lesion_stats @@ -257,7 +260,8 @@ def _build_arg_parser(): help='Minimum lesion volume in mm3 [%(default)s].') p.add_argument('--density_weighting', action="store_true", - help='Use density-weighting for the metric weighted matrix.') + help='Use density-weighting for the metric weighted ' + 'matrix.') p.add_argument('--no_self_connection', action="store_true", help='Eliminate the diagonal from the matrices.') p.add_argument('--include_dps', metavar='OUT_DIR', @@ -377,13 +381,14 @@ def main(): measures_dict_list = [] if nbr_cpu == 1: for comb in comb_list: - measures_dict_list.append(_processing_wrapper([args.in_hdf5, - img_labels, comb, - measures_to_compute, - args.similarity, - args.density_weighting, - args.include_dps, - args.min_lesion_vol])) + measures_dict_list.append(_processing_wrapper( + [args.in_hdf5, + img_labels, comb, + measures_to_compute, + args.similarity, + args.density_weighting, + args.include_dps, + args.min_lesion_vol])) else: pool = multiprocessing.Pool(nbr_cpu) measures_dict_list = pool.map(_processing_wrapper, @@ -396,7 +401,8 @@ def main(): itertools.repeat( args.density_weighting), itertools.repeat(args.include_dps), - itertools.repeat(args.min_lesion_vol))) + itertools.repeat(args.min_lesion_vol) + )) pool.close() pool.join() diff --git a/scripts/scil_connectivity_filter.py b/scripts/scil_connectivity_filter.py index 26f3530774..cdb9da02ac 100755 --- a/scripts/scil_connectivity_filter.py +++ b/scripts/scil_connectivity_filter.py @@ -31,6 +31,8 @@ conditions must be met in order not to be filtered. 
If the user wants to manually handle the requirements, --keep_condition_count can be used and manually binarized using scil_connectivity_math.py + +Formerly: scil_filter_connectivity.py """ import argparse diff --git a/scripts/scil_connectivity_graph_measures.py b/scripts/scil_connectivity_graph_measures.py index 351d874a10..f6abbca632 100755 --- a/scripts/scil_connectivity_graph_measures.py +++ b/scripts/scil_connectivity_graph_measures.py @@ -7,9 +7,9 @@ some measures require one or the other. This script evaluates the measures one subject at the time. To generate a -population dictionary (similarly to other scil_evaluate_*.py scripts), use the ---append_json option as well as using the same output filename. ->>> for i in hcp/*/; do scil_evaluate_connectivity_measures.py ${i}/sc_prob.npy +population dictionary (similarly to other scil_connectivity_*.py scripts), use +the --append_json option as well as using the same output filename. +>>> for i in hcp/*/; do scil_connectivity_graph_measures.py ${i}/sc_prob.npy ${i}/len_prob.npy hcp_prob.json --append_json --avg_node_wise; done Some measures output one value per node, the default behavior is to list @@ -27,6 +27,8 @@ This script is under the GNU GPLv3 license, for more detail please refer to https://www.gnu.org/licenses/gpl-3.0.en.html + +Formerly: scil_evaluate_connectivity_graph_measures.py """ import argparse diff --git a/scripts/scil_connectivity_normalize.py b/scripts/scil_connectivity_normalize.py index 730fc2562f..562e971f15 100755 --- a/scripts/scil_connectivity_normalize.py +++ b/scripts/scil_connectivity_normalize.py @@ -39,6 +39,8 @@ However, the proposed weighting of edge presented in this publication is not implemented. + +Formerly: scil_normalize_connectivity.py """ import argparse diff --git a/scripts/scil_connectivity_pairwise_agreement.py b/scripts/scil_connectivity_pairwise_agreement.py index 62946ae39f..14a4d22a3a 100755 --- a/scripts/scil_connectivity_pairwise_agreement.py +++ b/scripts/scil_connectivity_pairwise_agreement.py @@ -6,6 +6,8 @@ The computed similarity measures are: sum of square difference and pearson correlation coefficent + +Formerly: scil_evaluate_connectivity_pairwaise_agreement_measures.py """ import argparse diff --git a/scripts/scil_connectivity_print_filenames.py b/scripts/scil_connectivity_print_filenames.py index 258ec905cd..7bcb35ad0f 100755 --- a/scripts/scil_connectivity_print_filenames.py +++ b/scripts/scil_connectivity_print_filenames.py @@ -15,6 +15,8 @@ for file in $(cat pass.txt); do mv ${SOMEWHERE}/${FILE} ${SOMEWHERE_ELSE}/; done + +Formerly: scil_print_connectivity_filenames.py """ import argparse diff --git a/scripts/scil_connectivity_reorder_rois.py b/scripts/scil_connectivity_reorder_rois.py index 435d682a21..677e5acf44 100755 --- a/scripts/scil_connectivity_reorder_rois.py +++ b/scripts/scil_connectivity_reorder_rois.py @@ -19,6 +19,8 @@ sparse matrix into an ordering that reduces the matrix bandwidth. The output file can then be re-used with --in_ordering. Only one input can be used with this option, we recommand an average streamline count or volume matrix. + +Formerly: scil_reorder_connectivity.py """ import argparse diff --git a/scripts/scil_dki_metrics.py b/scripts/scil_dki_metrics.py index 1b61121dda..ea937f07ac 100755 --- a/scripts/scil_dki_metrics.py +++ b/scripts/scil_dki_metrics.py @@ -38,6 +38,8 @@ therein. 
[1] examples_built/reconst_dki/#example-reconst-dki [2] examples_built/reconst_msdki/#example-reconst-msdki + +Formerly: scil_compute_kurtosis_metrics.py """ import argparse @@ -57,7 +59,8 @@ from scilpy.io.image import get_data_as_mask from scilpy.io.utils import (add_overwrite_arg, assert_inputs_exist, assert_outputs_exist, add_force_b0_arg) -from scilpy.gradients.bvec_bval_tools import (normalize_bvecs, is_normalized_bvecs, +from scilpy.gradients.bvec_bval_tools import (normalize_bvecs, + is_normalized_bvecs, check_b0_threshold, identify_shells) diff --git a/scripts/scil_dti_metrics.py b/scripts/scil_dti_metrics.py index 64c31a4cf9..c4e955d0c2 100755 --- a/scripts/scil_dti_metrics.py +++ b/scripts/scil_dti_metrics.py @@ -20,6 +20,8 @@ signals, pulsation and misalignment artifacts, see [J-D Tournier, S. Mori, A. Leemans. Diffusion Tensor Imaging and Beyond. MRM 2011]. + +Formerly: scil_compute_dti_metrics.py """ import argparse diff --git a/scripts/scil_dwi_apply_bias_field.py b/scripts/scil_dwi_apply_bias_field.py index 3891efa75d..2f4e175162 100755 --- a/scripts/scil_dwi_apply_bias_field.py +++ b/scripts/scil_dwi_apply_bias_field.py @@ -5,6 +5,8 @@ Apply bias field correction to DWI. This script doesn't compute the bias field itself. It ONLY applies an existing bias field. Use the ANTs N4BiasFieldCorrection executable to compute the bias field. + +Formerly: scil_apply_bias_field_on_dwi.py """ import argparse diff --git a/scripts/scil_dwi_compute_snr.py b/scripts/scil_dwi_compute_snr.py index f2a7083087..73e852b346 100755 --- a/scripts/scil_dwi_compute_snr.py +++ b/scripts/scil_dwi_compute_snr.py @@ -26,6 +26,7 @@ [2] Reymbaut, et al (2021). Magic DIAMOND... https://doi.org/10.1016/j.media.2021.101988 +Formerly: scil_snr_in_roi.py """ import argparse diff --git a/scripts/scil_dwi_concatenate.py b/scripts/scil_dwi_concatenate.py index e20cf8bca3..b130d4e66d 100755 --- a/scripts/scil_dwi_concatenate.py +++ b/scripts/scil_dwi_concatenate.py @@ -4,6 +4,8 @@ """ Concatenate DWI, bval and bvecs together. File must be specified in matching order. Default data type will be the same as the first input DWI. + +Formerly: scil_concatenate_dwi.py """ import argparse diff --git a/scripts/scil_dwi_extract_b0.py b/scripts/scil_dwi_extract_b0.py index b96ab4ee18..13c71f81ab 100755 --- a/scripts/scil_dwi_extract_b0.py +++ b/scripts/scil_dwi_extract_b0.py @@ -5,6 +5,8 @@ Extract B0s from DWI, based on the bval and bvec information. The default behavior is to save the first b0 of the series. + +Formerly: scil_extract_b0.py """ import argparse diff --git a/scripts/scil_dwi_extract_shell.py b/scripts/scil_dwi_extract_shell.py index af9e4892ea..f0ccab25b8 100755 --- a/scripts/scil_dwi_extract_shell.py +++ b/scripts/scil_dwi_extract_shell.py @@ -15,6 +15,7 @@ setting the --block-size argument. A block size of X means that X DWI volumes are loaded at a time for processing. +Formerly: scil_extract_dwi_shell.py """ import argparse diff --git a/scripts/scil_dwi_powder_average.py b/scripts/scil_dwi_powder_average.py index c99a4e3d00..5fd0ef97eb 100755 --- a/scripts/scil_dwi_powder_average.py +++ b/scripts/scil_dwi_powder_average.py @@ -12,6 +12,8 @@ Script currently does not take into account the diffusion gradient directions being averaged. 
+ +Formerly: scil_compute_powder_average.py """ import argparse diff --git a/scripts/scil_dwi_reorder_philips.py b/scripts/scil_dwi_reorder_philips.py index 0af2fe01ee..49b0133ca5 100755 --- a/scripts/scil_dwi_reorder_philips.py +++ b/scripts/scil_dwi_reorder_philips.py @@ -4,6 +4,8 @@ """ Re-order gradient according to original table (Philips) This script is not needed for version 5.6 and higher + +Formerly: scil_reorder_dwi_philips.py """ import argparse @@ -76,7 +78,8 @@ def main(): curr_version = curr_version.replace('\\', ' ').replace('_', ' ').split()[0] - if version.parse(SOFTWARE_VERSION_MIN) <= version.parse(curr_version): + if version.parse(SOFTWARE_VERSION_MIN) <= version.parse( + curr_version): sys.exit('ERROR: There is no need for reording since your ' 'dwi comes from a Philips machine with ' 'version {}. '.format(curr_version) + diff --git a/scripts/scil_dwi_split_by_indices.py b/scripts/scil_dwi_split_by_indices.py index 2578dafb34..edadcdaf84 100755 --- a/scripts/scil_dwi_split_by_indices.py +++ b/scripts/scil_dwi_split_by_indices.py @@ -11,6 +11,7 @@ extraction does not work. For instance, if one wants to split the x first b-1500s from the rest of the b-1500s in an image, simply put x as an index. +Formerly: scil_split_image.py """ import argparse diff --git a/scripts/scil_dwi_to_sh.py b/scripts/scil_dwi_to_sh.py index 762ff78c07..9d239eef66 100755 --- a/scripts/scil_dwi_to_sh.py +++ b/scripts/scil_dwi_to_sh.py @@ -3,6 +3,8 @@ """ Script to compute the SH coefficient directly on the raw DWI signal. + +Formerly: scil_compute_sh_from_signal.py """ import argparse diff --git a/scripts/scil_fodf_lobe_specific_metrics.py b/scripts/scil_fodf_lobe_specific_metrics.py index 53477523bb..55d93f523b 100755 --- a/scripts/scil_fodf_lobe_specific_metrics.py +++ b/scripts/scil_fodf_lobe_specific_metrics.py @@ -15,6 +15,8 @@ Using 12 threads, the execution takes 10 minutes for FD estimation for a brain with 1mm isotropic resolution. Other metrics take less than a second. + +Formerly: scil_compute_lobe_specific_fodf_metrics.py """ import nibabel as nib diff --git a/scripts/scil_fodf_max_in_ventricles.py b/scripts/scil_fodf_max_in_ventricles.py index f574e8e78f..fe864d8c35 100755 --- a/scripts/scil_fodf_max_in_ventricles.py +++ b/scripts/scil_fodf_max_in_ventricles.py @@ -6,6 +6,8 @@ estimated from a MD and FA threshold. This allows to clip the noise of fODF using an absolute thresold. + +Formerly: scil_compute_fodf_max_in_ventricles.py """ import argparse diff --git a/scripts/scil_fodf_memsmt.py b/scripts/scil_fodf_memsmt.py index 96c5acb2db..e4a1b4fb07 100755 --- a/scripts/scil_fodf_memsmt.py +++ b/scripts/scil_fodf_memsmt.py @@ -26,9 +26,11 @@ If --not_all is set, only the files specified explicitly by the flags will be output. -Based on P. Karan et al., Bridging the gap between constrained spherical -deconvolution and diffusional variance decomposition via tensor-valued +Based on P. Karan et al., Bridging the gap between constrained spherical +deconvolution and diffusional variance decomposition via tensor-valued diffusion MRI. Medical Image Analysis (2022) + +Formerly: scil_compute_memsmt_fodf.py """ import argparse diff --git a/scripts/scil_fodf_metrics.py b/scripts/scil_fodf_metrics.py index 74502538b8..c14fe94ec2 100755 --- a/scripts/scil_fodf_metrics.py +++ b/scripts/scil_fodf_metrics.py @@ -14,7 +14,7 @@ The --at argument should be set to a value which is 1.5 times the maximal value of the fODF in the ventricules. 
This can be obtained with the -compute_fodf_max_in_ventricules.py script. +scil_fodf_max_in_ventricles.py script. If the --abs_peaks_and_values argument is set, the peaks are all normalized and the peak_values are equal to the actual fODF amplitude of the peaks. By @@ -29,6 +29,8 @@ See [Raffelt et al. NeuroImage 2012] and [Dell'Acqua et al HBM 2013] for the definitions. + +Formerly: scil_compute_fodf_metrics.py """ import argparse @@ -56,15 +58,15 @@ def _build_arg_parser(): help='Discrete sphere to use in the processing ' '[%(default)s].') p.add_argument('--mask', metavar='', - help='Path to a binary mask. Only the data inside the mask\n' - 'will beused for computations and reconstruction ' '[%(default)s].') + help='Path to a binary mask. Only the data inside the mask' + '\nwill be used for computations and reconstruction ' '[%(default)s].') p.add_argument('--at', dest='a_threshold', type=float, default='0.0', help='Absolute threshold on fODF amplitude. This ' 'value should be set to\napproximately 1.5 to 2 times ' 'the maximum fODF amplitude in isotropic voxels\n' - '(ie. ventricles).\nUse scil_fodf_max_in_ventricles.py ' 'to find the maximal value.\n' + '(ie. ventricles).\nUse scil_fodf_max_in_ventricles.py' ' to find the maximal value.\n' 'See [Dell\'Acqua et al HBM 2013] [%(default)s].') p.add_argument('--rt', dest='r_threshold', type=float, default='0.1', help='Relative threshold on fODF amplitude in percentage ' @@ -86,10 +88,11 @@ def _build_arg_parser(): g.add_argument('--afd_max', metavar='file', default='', help='Output filename for the AFD_max map.') g.add_argument('--afd_total', metavar='file', default='', - help='Output filename for the AFD_total map (SH coeff = 0).') + help='Output filename for the AFD_total map ' + '(SH coeff = 0).') g.add_argument('--afd_sum', metavar='file', default='', - help='Output filename for the sum of all peak contributions\n' - '(sum of fODF lobes on the sphere).') + help='Output filename for the sum of all peak contributions' + '\n(sum of fODF lobes on the sphere).') g.add_argument('--nufo', metavar='file', default='', help='Output filename for the NuFO map.') g.add_argument('--rgb', metavar='file', default='', @@ -188,10 +191,12 @@ def main(): where=peak_values[..., 0, None] != 0) peak_dirs[...] *= peak_values[..., :, None] if args.peaks: - nib.save(nib.Nifti1Image(reshape_peaks_for_visualization(peak_dirs), - affine), args.peaks) + nib.save(nib.Nifti1Image( + reshape_peaks_for_visualization(peak_dirs), + affine), args.peaks) if args.peak_values: - nib.save(nib.Nifti1Image(peak_values, vol.affine), args.peak_values) + nib.save(nib.Nifti1Image(peak_values, vol.affine), + args.peak_values) if args.peak_indices: nib.save(nib.Nifti1Image(peak_indices, vol.affine), args.peak_indices) diff --git a/scripts/scil_fodf_msmt.py b/scripts/scil_fodf_msmt.py index d88642c45a..6b0f8b5bca 100755 --- a/scripts/scil_fodf_msmt.py +++ b/scripts/scil_fodf_msmt.py @@ -15,6 +15,8 @@ Based on B. Jeurissen et al., Multi-tissue constrained spherical deconvolution for improved analysis of multi-shell diffusion MRI data. Neuroimage (2014) + +Formerly: scil_compute_msmt_fodf.py """ import argparse diff --git a/scripts/scil_fodf_ssst.py b/scripts/scil_fodf_ssst.py index d87a109b65..e3354c74c1 100755 --- a/scripts/scil_fodf_ssst.py +++ b/scripts/scil_fodf_ssst.py @@ -5,6 +5,8 @@ Script to compute Constrained Spherical Deconvolution (CSD) fiber ODFs. See [Tournier et al. 
NeuroImage 2007] + +Formerly: scil_compute_ssst_fodf.py """ import argparse diff --git a/scripts/scil_fodf_to_bingham.py b/scripts/scil_fodf_to_bingham.py index 832e855bef..9629c5d3f1 100755 --- a/scripts/scil_fodf_to_bingham.py +++ b/scripts/scil_fodf_to_bingham.py @@ -11,6 +11,8 @@ Using 12 threads, the execution takes approximately 30 minutes for a brain with 1mm isotropic resolution. + +Formerly: scil_fit_bingham_to_fodf.py """ import nibabel as nib diff --git a/scripts/scil_freewater_maps.py b/scripts/scil_freewater_maps.py index e97da79719..b2c1c0c59b 100755 --- a/scripts/scil_freewater_maps.py +++ b/scripts/scil_freewater_maps.py @@ -4,6 +4,8 @@ """ Compute Free Water maps [1] using AMICO. This script supports both single and multi-shell data. + +Formerly: scil_compute_freewater.py """ import argparse diff --git a/scripts/scil_frf_mean.py b/scripts/scil_frf_mean.py index 7b2861258f..1bd0827ad2 100755 --- a/scripts/scil_frf_mean.py +++ b/scripts/scil_frf_mean.py @@ -4,6 +4,8 @@ """ Compute the mean Fiber Response Function from a set of individually computed Response Functions. + +Formerly: scil_compute_mean_frf.py """ import argparse diff --git a/scripts/scil_frf_memsmt.py b/scripts/scil_frf_memsmt.py index 6ca58cf153..e2302acc3f 100755 --- a/scripts/scil_frf_memsmt.py +++ b/scripts/scil_frf_memsmt.py @@ -32,6 +32,8 @@ Based on P. Karan et al., Bridging the gap between constrained spherical deconvolution and diffusional variance decomposition via tensor-valued diffusion MRI. Medical Image Analysis (2022) + +Formerly: scil_compute_memsmt_frf.py """ import argparse diff --git a/scripts/scil_frf_msmt.py b/scripts/scil_frf_msmt.py index 9afc47be8d..5ee9cc5f72 100755 --- a/scripts/scil_frf_msmt.py +++ b/scripts/scil_frf_msmt.py @@ -24,6 +24,8 @@ Based on B. Jeurissen et al., Multi-tissue constrained spherical deconvolution for improved analysis of multi-shell diffusion MRI data. Neuroimage (2014) + +Formerly: scil_compute_msmt_frf.py """ import argparse diff --git a/scripts/scil_frf_set_diffusivities.py b/scripts/scil_frf_set_diffusivities.py index eadf8966d5..a0f064c2d1 100755 --- a/scripts/scil_frf_set_diffusivities.py +++ b/scripts/scil_frf_set_diffusivities.py @@ -7,6 +7,8 @@ and keep the mean b0. The FRF file is obtained from scil_frf_ssst.py + +Formerly: scil_set_response_function.py """ import argparse @@ -31,7 +33,8 @@ def _build_arg_parser(): help='Path of the new FRF file.') p.add_argument('--no_factor', action='store_true', help='If supplied, the fiber response function is\n' - 'evaluated without the x 10**-4 factor. [%(default)s].') + 'evaluated without the x 10**-4 factor. [%(default)s].' + ) add_overwrite_arg(p) return p diff --git a/scripts/scil_frf_ssst.py b/scripts/scil_frf_ssst.py index 0a5eae4457..10bd8d3e86 100755 --- a/scripts/scil_frf_ssst.py +++ b/scripts/scil_frf_ssst.py @@ -6,6 +6,8 @@ A DTI fit is made, and voxels containing a single fiber population are found using a threshold on the FA. + +Formerly: scil_compute_ssst_frf.py """ import argparse diff --git a/scripts/scil_gradients_apply_transform.py b/scripts/scil_gradients_apply_transform.py index 98a411eeee..a655fe405a 100755 --- a/scripts/scil_gradients_apply_transform.py +++ b/scripts/scil_gradients_apply_transform.py @@ -3,6 +3,8 @@ """ Transform bvecs using an affine/rigid transformation. + +Formerly: scil_apply_transform_to_bvecs.py. 
""" import argparse diff --git a/scripts/scil_gradients_convert_fsl_to_mrtrix.py b/scripts/scil_gradients_convert_fsl_to_mrtrix.py index 20fc751a72..05dce061ee 100755 --- a/scripts/scil_gradients_convert_fsl_to_mrtrix.py +++ b/scripts/scil_gradients_convert_fsl_to_mrtrix.py @@ -3,6 +3,8 @@ """ Script to convert bval/bvec FSL style to MRtrix style. + +Formally: scil_convert_gradients_fsl_to_mrtrix.py """ import argparse diff --git a/scripts/scil_gradients_convert_mrtrix_to_fsl.py b/scripts/scil_gradients_convert_mrtrix_to_fsl.py index 8f8a2463df..669f4ce53a 100755 --- a/scripts/scil_gradients_convert_mrtrix_to_fsl.py +++ b/scripts/scil_gradients_convert_mrtrix_to_fsl.py @@ -2,6 +2,8 @@ # -*- coding: utf-8 -*- """ Script to convert bval/bvec MRtrix style to FSL style. + +Formally: scil_convert_gradients_mrtrix_to_fsl.py """ import argparse diff --git a/scripts/scil_gradients_generate_sampling.py b/scripts/scil_gradients_generate_sampling.py index 9daf6b4b76..2cff7d2fe4 100755 --- a/scripts/scil_gradients_generate_sampling.py +++ b/scripts/scil_gradients_generate_sampling.py @@ -9,6 +9,8 @@ flipped to maximize spread for eddy current correction, b0s are interleaved at equal spacing and the non-b0 samples are finally shuffled to minimize the total diffusion gradient amplitude over a few TR. + +Formally: scil_generate_gradient_sampling.py """ import argparse @@ -157,9 +159,9 @@ def main(): if add_at_least_a_b0: bvals.append(args.b0_value) bvecs, shell_idx, nb_b0s = add_b0s_to_bvecs(bvecs, shell_idx, - start_b0=b0_start, - b0_every=args.b0_every, - finish_b0=args.b0_end) + start_b0=b0_start, + b0_every=args.b0_every, + finish_b0=args.b0_end) logging.info(' Interleaved {} b0s'.format(nb_b0s)) else: logging.info(" Careful! No b0 added!") diff --git a/scripts/scil_gradients_modify_axes.py b/scripts/scil_gradients_modify_axes.py index eb2b89eb78..42c32e09d8 100755 --- a/scripts/scil_gradients_modify_axes.py +++ b/scripts/scil_gradients_modify_axes.py @@ -4,6 +4,8 @@ Flip (ex, x --> -x) or swap (ex, x <-> y) chosen axes of the gradient sampling matrix. Result will be saved in the same format as input gradient sampling file. + +Formally: scil_flip_gradients.py or scil_swap_gradient_axis.py """ import argparse import os diff --git a/scripts/scil_gradients_round_bvals.py b/scripts/scil_gradients_round_bvals.py index 5c4f630192..124f378fbb 100755 --- a/scripts/scil_gradients_round_bvals.py +++ b/scripts/scil_gradients_round_bvals.py @@ -12,6 +12,8 @@ between [1980, 2020] and round them to the value of 2000. >> scil_gradients_round_bvals.py bvals 0 1000 2000 newbvals --tolerance 20 + +Formally: scil_resample_bvals.py """ import argparse diff --git a/scripts/scil_gradients_validate_correct.py b/scripts/scil_gradients_validate_correct.py index de7816fd55..be1a3a305c 100755 --- a/scripts/scil_gradients_validate_correct.py +++ b/scripts/scil_gradients_validate_correct.py @@ -17,6 +17,8 @@ It is also possible to use a file containing multiple principal directions per voxel, given that the amplitude of each direction is also given with the argument --peaks_vals. 
+ +Formerly: scil_validate_and_correct_bvecs.py """ import argparse diff --git a/scripts/scil_gradients_validate_correct_eddy.py b/scripts/scil_gradients_validate_correct_eddy.py index 326766c9a6..79f15b82e7 100755 --- a/scripts/scil_gradients_validate_correct_eddy.py +++ b/scripts/scil_gradients_validate_correct_eddy.py @@ -5,6 +5,8 @@ Validate and correct gradients from eddy outputs With full AP-PA eddy outputs a full bvec bval (2x nb of dirs and bval) that doesnt fit with the output dwi (1x nb of dir) + +Formerly: scil_validate_and_correct_eddy_gradients.py """ import argparse diff --git a/scripts/scil_labels_combine.py b/scripts/scil_labels_combine.py index 68e74816cc..2e903c9128 100755 --- a/scripts/scil_labels_combine.py +++ b/scripts/scil_labels_combine.py @@ -12,6 +12,8 @@ >>> scil_labels_combine.py slf_labels.nii.gz --volume_ids a2009s_aseg.nii.gz all --volume_ids clean/s1__DKT.nii.gz 1028 2028 + +Formerly: scil_combine_labels.py. """ diff --git a/scripts/scil_labels_dilate.py b/scripts/scil_labels_dilate.py index 296f448179..c9160f3242 100755 --- a/scripts/scil_labels_dilate.py +++ b/scripts/scil_labels_dilate.py @@ -13,6 +13,8 @@ >>> scil_labels_dilate.py wmparc_t1.nii.gz wmparc_dil.nii.gz \\ --label_to_fill 0 5001 5002 \\ --label_not_to_dilate 4 43 10 11 12 49 50 51 + +Formerly: scil_dilate_labels.py """ import argparse diff --git a/scripts/scil_labels_remove.py b/scripts/scil_labels_remove.py index e451f4a6ea..afcee7da80 100755 --- a/scripts/scil_labels_remove.py +++ b/scripts/scil_labels_remove.py @@ -5,6 +5,8 @@ Script to remove specific labels from an atlas volume. >>> scil_labels_remove.py DKT_labels.nii out_labels.nii.gz -i 5001 5002 + +Formerly: scil_remove_labels.py """ diff --git a/scripts/scil_labels_split_volume_by_ids.py b/scripts/scil_labels_split_volume_by_ids.py index 8baa77b8d5..99dc89b2f6 100755 --- a/scripts/scil_labels_split_volume_by_ids.py +++ b/scripts/scil_labels_split_volume_by_ids.py @@ -7,6 +7,8 @@ considered as the background and is ignored. IMPORTANT: your label image must be of an integer type. + +Formerly: scil_split_volume_by_ids.py """ import argparse diff --git a/scripts/scil_labels_split_volume_from_lut.py b/scripts/scil_labels_split_volume_from_lut.py index 81008cf594..5f92c59575 100755 --- a/scripts/scil_labels_split_volume_from_lut.py +++ b/scripts/scil_labels_split_volume_from_lut.py @@ -8,6 +8,8 @@ are extracted. IMPORTANT: your label image must be of an integer type. + +Formerly: scil_split_volume_by_labels.py """ import argparse diff --git a/scripts/scil_mti_maps_MT.py b/scripts/scil_mti_maps_MT.py index e50b0b8396..8b1fc26f0b 100755 --- a/scripts/scil_mti_maps_MT.py +++ b/scripts/scil_mti_maps_MT.py @@ -52,6 +52,7 @@ --in_mtoff path/to/echo*mtoff.nii.gz --in_mton path/to/echo*mton.nii.gz --in_t1w path/to/echo*T1w.nii.gz +Formerly: scil_compute_MT_maps.py """ import argparse diff --git a/scripts/scil_mti_maps_ihMT.py b/scripts/scil_mti_maps_ihMT.py index a49377664a..bcece2c901 100755 --- a/scripts/scil_mti_maps_ihMT.py +++ b/scripts/scil_mti_maps_ihMT.py @@ -61,6 +61,7 @@ If you want to use a single echo add --single_echo to the command line and replace the * with the specific number of the echo. 
+Formerly: scil_compute_ihMT_maps.py """ import argparse diff --git a/scripts/scil_plot_mean_std_per_point.py b/scripts/scil_plot_mean_std_per_point.py index f7cd748039..fba786ec66 100755 --- a/scripts/scil_plot_mean_std_per_point.py +++ b/scripts/scil_plot_mean_std_per_point.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- """ -Plot all mean/std per point for a subject or population json file from +Plot all mean/std per point for a subject or population json file from tractometry-flow. WARNING: For population, the displayed STDs is only showing the variation of the means. It does not account intra-subject STDs. @@ -26,7 +26,7 @@ def _build_arg_parser(): p.add_argument('in_json', help='JSON file containing the mean/std per point. For ' 'example, can be created using ' - 'scil_compute_metrics_along_streamline.') + 'scil_bundle_mean_std.py.') p.add_argument('out_dir', help='Output directory.') @@ -38,8 +38,8 @@ 'Avoid unequal plots across datasets, replace missing ' 'data with zeros.') p.add_argument('--display_means', action='store_true', - help='Display the subjects means as semi-transparent line.\n' - 'Poor results when the number of subject is high.') + help='Display the subjects means as semi-transparent line.' + '\nPoor results when the number of subject is high.') p1 = p.add_mutually_exclusive_group() p1.add_argument('--fill_color', diff --git a/scripts/scil_qball_metrics.py b/scripts/scil_qball_metrics.py index e05dcaab1f..08d8da6f41 100755 --- a/scripts/scil_qball_metrics.py +++ b/scripts/scil_qball_metrics.py @@ -13,6 +13,8 @@ See [Descoteaux et al MRM 2007, Aganj et al MRM 2009] for details and [Cote et al MEDIA 2013] for quantitative comparisons. + +Formerly: scil_compute_qball_metrics.py """ import argparse import logging @@ -31,7 +33,8 @@ assert_outputs_exist, add_force_b0_arg, validate_nbr_processes) from scilpy.io.image import get_data_as_mask -from scilpy.gradients.bvec_bval_tools import (normalize_bvecs, is_normalized_bvecs, +from scilpy.gradients.bvec_bval_tools import (normalize_bvecs, + is_normalized_bvecs, check_b0_threshold) diff --git a/scripts/scil_rgb_convert.py b/scripts/scil_rgb_convert.py index c360eccb6d..6fc7ce5802 100755 --- a/scripts/scil_rgb_convert.py +++ b/scripts/scil_rgb_convert.py @@ -17,6 +17,8 @@ -Case 1: 3D image, in Trackvis format where each voxel contains a tuple of 3 elements, one for each value (uint8). -Case 2: 4D image where the 4th dimension contains 3 values (uint8). + +Formerly: scil_convert_rgb.py """ import argparse @@ -67,7 +69,8 @@ def main(): elif original_im.ndim == 3: converted_im_float = decfa_to_float(original_im) - converted_data_int = np.asanyarray(converted_im_float.dataobj).astype(np.uint8) + converted_data_int = \ np.asanyarray(converted_im_float.dataobj).astype(np.uint8) converted_im = nib.Nifti1Image(converted_data_int, converted_im_float.affine) diff --git a/scripts/scil_sh_convert.py b/scripts/scil_sh_convert.py index 674eed8c32..e847237199 100755 --- a/scripts/scil_sh_convert.py +++ b/scripts/scil_sh_convert.py @@ -2,11 +2,13 @@ # -*- coding: utf-8 -*- """ - Convert a SH file between the two commonly used bases - ('descoteaux07' or 'tournier07'). The specified basis corresponds to the - input data basis. Note that by default, both legacy 'descoteaux07' and - legacy 'tournier07' bases will be assumed. For more information, see - https://dipy.org/documentation/1.4.0./theory/sh_basis/. +Convert a SH file between the two commonly used bases +('descoteaux07' or 'tournier07'). 
The specified basis corresponds to the +input data basis. Note that by default, both legacy 'descoteaux07' and +legacy 'tournier07' bases will be assumed. For more information, see +https://dipy.org/documentation/1.4.0./theory/sh_basis/. + +Formerly: scil_convert_sh_basis.py """ import argparse @@ -29,7 +31,7 @@ def _build_arg_parser(): help='Input SH filename. (nii or nii.gz)') p.add_argument('out_sh', help='Output SH filename. (nii or nii.gz)') - + p.add_argument('--in_sh_is_not_legacy', action='store_true', help='If set, this means that the input SH are not encoded ' 'with the legacy version of their SH basis.') diff --git a/scripts/scil_sh_fusion.py b/scripts/scil_sh_fusion.py index f38c33a5d1..8cecd7d7a1 100755 --- a/scripts/scil_sh_fusion.py +++ b/scripts/scil_sh_fusion.py @@ -10,6 +10,8 @@ conserving the most relevant information. Based on [1] and [2]. + +Formerly: scil_merge_sh.py """ import argparse diff --git a/scripts/scil_sh_to_rish.py b/scripts/scil_sh_to_rish.py index 97070e5101..d477def3fa 100755 --- a/scripts/scil_sh_to_rish.py +++ b/scripts/scil_sh_to_rish.py @@ -17,6 +17,8 @@ [1] Mirzaalian, Hengameh, et al. "Harmonizing diffusion MRI data across multiple sites and scanners." MICCAI 2015. https://scholar.harvard.edu/files/hengameh/files/miccai2015.pdf + +Formerly: scil_compute_rish_from_sh.py """ import argparse diff --git a/scripts/scil_sh_to_sf.py b/scripts/scil_sh_to_sf.py index 368c06a442..978867bda8 100755 --- a/scripts/scil_sh_to_sf.py +++ b/scripts/scil_sh_to_sf.py @@ -3,11 +3,14 @@ """ Script to sample SF values from a Spherical Harmonics signal. Outputs a Nifti -file with the SF values and an associated .bvec file with the chosen directions. +file with the SF values and an associated .bvec file with the chosen +directions. If converting from SH to a DWI-like SF volume, --in_bval and --in_b0 need to be provided to concatenate the b0 image to the SF, and to generate the new bvals file. Otherwise, no .bval file will be created. + +Formerly: scil_compute_sf_from_sh.py """ import argparse @@ -42,7 +45,8 @@ def _build_arg_parser(): choices=sorted(SPHERE_FILES.keys()), help='Sphere used for the SH to SF projection. ') directions.add_argument('--in_bvec', - help="Directions used for the SH to SF projection.") + help="Directions used for the SH to SF " + "projection.") p.add_argument('--dtype', default="float32", choices=["float32", "float64"], diff --git a/scripts/scil_surface_apply_transform.py b/scripts/scil_surface_apply_transform.py index 6a635d4ac6..2a2aef3a73 100755 --- a/scripts/scil_surface_apply_transform.py +++ b/scripts/scil_surface_apply_transform.py @@ -15,6 +15,8 @@ The script will use the linear affine first and then the warp image from ANTs. The resulting surface should be aligned *b0 world LPS* coordinates (aligned over the b0 in MI-Brain). + +Formerly: scil_apply_transform_to_surface.py. """ import argparse diff --git a/scripts/scil_surface_convert.py b/scripts/scil_surface_convert.py index 625f4edc28..bd5f1f4b1c 100755 --- a/scripts/scil_surface_convert.py +++ b/scripts/scil_surface_convert.py @@ -6,6 +6,8 @@ ".vtk", ".vtp", ".ply", ".stl", ".xml", ".obj" > scil_surface_convert.py surf.vtk converted_surf.ply + +Formerly: scil_convert_surface.py """ import argparse import os diff --git a/scripts/scil_surface_flip.py b/scripts/scil_surface_flip.py index 793929e282..a3d275bd85 100755 --- a/scripts/scil_surface_flip.py +++ b/scripts/scil_surface_flip.py @@ -11,6 +11,8 @@ !!! important FreeSurfer surfaces must be in their respective folder !!! 
> mris_convert --to-scanner lh.white lh.white.vtk > scil_surface_flip.py lh.white.vtk lh_white_lps.vtk x y + +Formerly: scil_flip_surface.py """ import argparse diff --git a/scripts/scil_surface_smooth.py b/scripts/scil_surface_smooth.py index 783e8f3604..b0b58eee5f 100755 --- a/scripts/scil_surface_smooth.py +++ b/scripts/scil_surface_smooth.py @@ -9,6 +9,8 @@ [1, 10] for a small smoothing [10, 100] for a moderate smoothing [100, 1000] for a big smoothing + +Formerly: scil_smooth_surface.py """ import argparse diff --git a/scripts/scil_tracking_local.py b/scripts/scil_tracking_local.py index 92648a693f..eb5cb49cb9 100755 --- a/scripts/scil_tracking_local.py +++ b/scripts/scil_tracking_local.py @@ -43,6 +43,8 @@ [1]: Aydogan, D. B., & Shi, Y. (2020). Parallel transport tractography. IEEE transactions on medical imaging, 40(2), 635-647. + +Formerly: scil_compute_local_tracking.py """ import argparse diff --git a/scripts/scil_tracking_local_dev.py b/scripts/scil_tracking_local_dev.py index 51aa613abe..1867c1f88a 100755 --- a/scripts/scil_tracking_local_dev.py +++ b/scripts/scil_tracking_local_dev.py @@ -39,6 +39,8 @@ References: [1] Girard, G., Whittingstall K., Deriche, R., and Descoteaux, M. (2014). Towards quantitative connectivity analysis: reducing tractography biases. Neuroimage, 98, 266-278. + +Formerly: scil_compute_local_tracking_dev.py """ import argparse import logging @@ -276,7 +278,7 @@ def main(): # typically produces a lot of outputs! set_sft_logger_level('WARNING') - # Compared with scil_compute_local_tracking, using sft rather than + # Compared with scil_tracking_local, using sft rather than # LazyTractogram to deal with space. # Contrary to scilpy or dipy, where space after tracking is vox, here # space after tracking is voxmm. diff --git a/scripts/scil_tracking_pft.py b/scripts/scil_tracking_pft.py index b668018471..33995ee769 100755 --- a/scripts/scil_tracking_pft.py +++ b/scripts/scil_tracking_pft.py @@ -6,8 +6,7 @@ The tracking is done inside partial volume estimation maps and uses the particle filtering tractography (PFT) algorithm. See -scil_compute_maps_for_particle_filter_tracking.py -to generate PFT required maps. +scil_tracking_pft_maps.py to generate PFT required maps. Streamlines longer than min_length and shorter than max_length are kept. The tracking direction is chosen in the aperture cone defined by the @@ -23,6 +22,8 @@ deterministic algorithm and 0.2mm for probabilitic algorithm. All the input nifti files must be in isotropic resolution. + +Formerly: scil_compute_pft.py """ import argparse @@ -63,11 +64,13 @@ def _build_arg_parser(): p.add_argument('in_seed', help='Seeding mask (.nii.gz).') p.add_argument('in_map_include', - help='The probability map (.nii.gz) of ending the streamline\n' - 'and including it in the output (CMC, PFT [1])') + help='The probability map (.nii.gz) of ending the\n' + 'streamline and including it in the output (CMC, PFT ' '[1])') p.add_argument('map_exclude_file', - help='The probability map (.nii.gz) of ending the streamline\n' - 'and excluding it in the output (CMC, PFT [1]).') + help='The probability map (.nii.gz) of ending the\n' + 'streamline and excluding it in the output (CMC, PFT ' '[1]).') p.add_argument('out_tractogram', help='Tractogram output file (must be .trk or .tck).') @@ -111,12 +114,14 @@ def _build_arg_parser(): pft_g = p.add_argument_group('PFT options') pft_g.add_argument('--particles', type=int, default=15, - help='Number of particles to use for PFT. 
[%(default)s]') + help='Number of particles to use for PFT. [%(default)s]' + ) pft_g.add_argument('--back', dest='back_tracking', type=float, default=2., help='Length of PFT back tracking (mm). [%(default)s]') pft_g.add_argument('--forward', dest='forward_tracking', type=float, default=1., - help='Length of PFT forward tracking (mm). [%(default)s]') + help='Length of PFT forward tracking (mm). ' '[%(default)s]') out_g = p.add_argument_group('Output options') out_g.add_argument('--compress', type=float, @@ -223,13 +228,15 @@ def main(): voxel_size = np.average(map_include_img.header['pixdim'][1:4]) if not args.act: - tissue_classifier = CmcStoppingCriterion(map_include_img.get_fdata(dtype=np.float32), - map_exclude_img.get_fdata(dtype=np.float32), - step_size=args.step_size, - average_voxel_size=voxel_size) + tissue_classifier = CmcStoppingCriterion( + map_include_img.get_fdata(dtype=np.float32), + map_exclude_img.get_fdata(dtype=np.float32), + step_size=args.step_size, + average_voxel_size=voxel_size) else: - tissue_classifier = ActStoppingCriterion(map_include_img.get_fdata(dtype=np.float32), - map_exclude_img.get_fdata(dtype=np.float32)) + tissue_classifier = ActStoppingCriterion( + map_include_img.get_fdata(dtype=np.float32), + map_exclude_img.get_fdata(dtype=np.float32)) if args.npv: nb_seeds = args.npv diff --git a/scripts/scil_tracking_pft_maps.py b/scripts/scil_tracking_pft_maps.py index ad4285fca2..8032a0f455 100755 --- a/scripts/scil_tracking_pft_maps.py +++ b/scripts/scil_tracking_pft_maps.py @@ -9,6 +9,8 @@ References: Girard, G., Whittingstall K., Deriche, R., and Descoteaux, M. (2014). Towards quantitative connectivity analysis: reducing tractography biases. Neuroimage. + +Formerly: scil_compute_maps_for_particle_filter_tracking.py """ import argparse diff --git a/scripts/scil_tracking_pft_maps_edit.py b/scripts/scil_tracking_pft_maps_edit.py index 339f9277ed..0e765d49d2 100755 --- a/scripts/scil_tracking_pft_maps_edit.py +++ b/scripts/scil_tracking_pft_maps_edit.py @@ -3,6 +3,8 @@ """ Modify PFT maps to allow PFT tracking in given mask (e.g edema). + +Formerly: scil_add_tracking_mask_to_pft_maps.py. """ import argparse diff --git a/scripts/scil_tractogram_apply_transform.py b/scripts/scil_tractogram_apply_transform.py index 9ee189232d..2294877bd2 100755 --- a/scripts/scil_tractogram_apply_transform.py +++ b/scripts/scil_tractogram_apply_transform.py @@ -12,7 +12,7 @@ the bounding box), three strategies are available: 1) default, crash at saving if invalid streamlines are present 2) --keep_invalid, save invalid streamlines. Leave it to the user to run - scil_remove_invalid_streamlines.py if needed. + scil_tractogram_remove_invalid.py if needed. 3) --remove_invalid, automatically remove invalid streamlines before saving. Should not remove more than a few streamlines. 4) --cut_invalid, automatically cut invalid streamlines before saving. 
@@ -31,6 +31,8 @@ 0GenericAffine.mat ${OUTPUT_NAME} --in_deformation 1Warp.nii.gz --reverse_operation + +Formerly: scil_apply_transform_to_tractogram.py """ import argparse diff --git a/scripts/scil_tractogram_commit.py b/scripts/scil_tractogram_commit.py index 6e6bde32ae..48dccdd5bb 100755 --- a/scripts/scil_tractogram_commit.py +++ b/scripts/scil_tractogram_commit.py @@ -58,6 +58,8 @@ - Confirm the quality of WM bundles reconstruction (essential tractogram) - Inspect the (N)RMSE map and look for peaks or anomalies - Compare the density map before and after (essential tractogram) + +Formerly: scil_run_commit.py """ import argparse diff --git a/scripts/scil_tractogram_compress.py b/scripts/scil_tractogram_compress.py index adcf2e177a..4560666751 100755 --- a/scripts/scil_tractogram_compress.py +++ b/scripts/scil_tractogram_compress.py @@ -6,6 +6,8 @@ The compression threshold represents the maximum distance (in mm) to the original position of the point. + +Formerly: scil_compress_streamlines.py """ import argparse diff --git a/scripts/scil_tractogram_convert.py b/scripts/scil_tractogram_convert.py index eec511b59e..d0b49ffc06 100755 --- a/scripts/scil_tractogram_convert.py +++ b/scripts/scil_tractogram_convert.py @@ -4,6 +4,8 @@ Conversion of '.tck', '.trk', '.fib', '.vtk' and 'dpy' files using updated file format standard. TRK file always needs a reference file, a NIFTI, for conversion. The FIB file format is in fact a VTK, MITK Diffusion supports it. + +Formerly: scil_convert_tractogram.py """ import argparse diff --git a/scripts/scil_tractogram_count_streamlines.py b/scripts/scil_tractogram_count_streamlines.py index 72c4c1602b..1b84ca1c3d 100755 --- a/scripts/scil_tractogram_count_streamlines.py +++ b/scripts/scil_tractogram_count_streamlines.py @@ -4,6 +4,8 @@ """ Return the number of streamlines in a tractogram. Only support trk and tck in order to support the lazy loading from nibabel. + +Formerly: scil_count_streamlines.py """ import argparse diff --git a/scripts/scil_tractogram_cut_streamlines.py b/scripts/scil_tractogram_cut_streamlines.py index 14a41c16f2..cbc27c4830 100755 --- a/scripts/scil_tractogram_cut_streamlines.py +++ b/scripts/scil_tractogram_cut_streamlines.py @@ -14,6 +14,8 @@ bounding box or going from binary mask #1 to binary mask #2. Both scenarios will erase data_per_point and data_per_streamline. + +Formerly: scil_cut_streamlines.py """ import argparse diff --git a/scripts/scil_tractogram_detect_loops.py b/scripts/scil_tractogram_detect_loops.py index 8305fdbb65..f7d0d99d3d 100755 --- a/scripts/scil_tractogram_detect_loops.py +++ b/scripts/scil_tractogram_detect_loops.py @@ -17,6 +17,8 @@ Reference: QuickBundles based on [Garyfallidis12] Frontiers in Neuroscience, 2012. 
---------------------------------------------------------------------------- + +Formerly: scil_detect_streamlines_loops.py """ import argparse diff --git a/scripts/scil_tractogram_extract_ushape.py b/scripts/scil_tractogram_extract_ushape.py index e9d93e2b65..a91ea3fabf 100755 --- a/scripts/scil_tractogram_extract_ushape.py +++ b/scripts/scil_tractogram_extract_ushape.py @@ -9,6 +9,8 @@ * 0 it defines straight streamlines * 1 it defines U-fibers * -1 it defines S-fibers + +Formerly: scil_extract_ushape.py """ import argparse @@ -66,7 +68,7 @@ def main(): check_tracts_same_format(parser, [args.in_tractogram, args.out_tractogram, args.remaining_tractogram]) - if not(-1 <= args.minU <= 1 and -1 <= args.maxU <= 1): + if not (-1 <= args.minU <= 1 and -1 <= args.maxU <= 1): parser.error('Min-Max ufactor "{},{}" '.format(args.minU, args.maxU) + 'must be between -1 and 1.') diff --git a/scripts/scil_tractogram_filter_by_anatomy.py b/scripts/scil_tractogram_filter_by_anatomy.py index 3e29be605a..b1355c4d77 100755 --- a/scripts/scil_tractogram_filter_by_anatomy.py +++ b/scripts/scil_tractogram_filter_by_anatomy.py @@ -35,11 +35,13 @@ Example usages: # Filter length, looping angle and anatomical ending region ->>> scil_tractogram_filter_anatomically.py tractogram.trk wmparc.nii.gz +>>> scil_tractogram_filter_by_anatomy.py tractogram.trk wmparc.nii.gz path/to/output/directory --minL 20 --maxL 200 -a 300 # Filter only anatomical ending region, with WM dilation and provided csf mask ->>> scil_tractogram_filter_anatomically.py tractogram.trk wmparc.nii.gz +>>> scil_tractogram_filter_by_anatomy.py tractogram.trk wmparc.nii.gz path/to/output/directory --csf_bin csf_bin.nii.gz --ctx_dilation_radius 2 + +Formerly: scil_filter_streamlines_anatomically.py """ import argparse diff --git a/scripts/scil_tractogram_filter_by_length.py b/scripts/scil_tractogram_filter_by_length.py index b83d1e87c3..6f73d2346c 100755 --- a/scripts/scil_tractogram_filter_by_length.py +++ b/scripts/scil_tractogram_filter_by_length.py @@ -3,6 +3,8 @@ """ Script to filter streamlines based on their lengths. + +Formerly: scil_filter_streamlines_by_length.py """ import argparse @@ -19,7 +21,8 @@ add_verbose_arg, assert_inputs_exist, assert_outputs_exist) -from scilpy.tractograms.streamline_operations import filter_streamlines_by_length +from scilpy.tractograms.streamline_operations import \ filter_streamlines_by_length def _build_arg_parser(): diff --git a/scripts/scil_tractogram_filter_by_orientation.py b/scripts/scil_tractogram_filter_by_orientation.py index e6677c5f8b..3a38da3b34 100755 --- a/scripts/scil_tractogram_filter_by_orientation.py +++ b/scripts/scil_tractogram_filter_by_orientation.py @@ -14,6 +14,8 @@ Note: we consider that x, y, z are the coordinates of the streamlines; we do not verify if they are aligned with the brain's orientation. + +Formerly: scil_filter_streamlines_by_orientation.py """ import argparse diff --git a/scripts/scil_tractogram_filter_by_roi.py b/scripts/scil_tractogram_filter_by_roi.py index 6fcb6869d4..58f4e54da2 100755 --- a/scripts/scil_tractogram_filter_by_roi.py +++ b/scripts/scil_tractogram_filter_by_roi.py @@ -32,6 +32,8 @@ WARNING: DISTANCE is optional and it should be used carefully with large voxel size (e.g > 2.5mm). The value is in voxel for ROIs and in mm for bounding box. Anisotropic data will affect each direction differently + +Formerly: scil_filter_tractogram.py """ import argparse @@ -104,8 +106,10 @@ def _build_arg_parser(): '(i.e. 
drawn_roi mask.nii.gz both_ends include 1).') p.add_argument('--overwrite_distance', nargs='+', action='append', - help='MODE CRITERIA DISTANCE (distance in voxel for ROIs and in mm for bounding box).\n' - 'If set, it will overwrite the distance associated to a specific mode/criteria.') + help='MODE CRITERIA DISTANCE (distance in voxel for ROIs ' + 'and in mm for bounding box).\n' + 'If set, it will overwrite the distance associated to ' + 'a specific mode/criteria.') p.add_argument('--extract_masks_atlas_roi', action='store_true', help='Extract atlas roi masks.') @@ -163,10 +167,12 @@ def prepare_filtering_list(parser, args): else: roi_opt_list.append(roi_opt.strip().split()) - if (len(roi_opt_list[-1]) < 4 or len(roi_opt_list) > 5) and roi_opt_list[-1][0] != 'atlas_roi': + if (len(roi_opt_list[-1]) < 4 or len(roi_opt_list) > 5) and \ roi_opt_list[-1][0] != 'atlas_roi': logging.error("Please specify 3 or 4 values " "for {} filtering.".format(roi_opt_list[-1][0])) - elif (len(roi_opt_list[-1]) < 5 or len(roi_opt_list) > 6) and roi_opt_list[-1][0] == 'atlas_roi': + elif (len(roi_opt_list[-1]) < 5 or len(roi_opt_list) > 6) and \ roi_opt_list[-1][0] == 'atlas_roi': logging.error("Please specify 4 or 5 values" " for {} filtering.".format(roi_opt_list[-1][0])) @@ -174,15 +180,18 @@ for index, roi_opt in enumerate(roi_opt_list): if roi_opt[0] == 'atlas_roi': if len(roi_opt) == 5: - filter_type, filter_arg, _, filter_mode, filter_criteria = roi_opt + filter_type, filter_arg, _, filter_mode, filter_criteria = \ roi_opt roi_opt_list[index].append(0) else: - filter_type, filter_arg, _, filter_mode, filter_criteria, filter_distance = roi_opt + filter_type, filter_arg, _, filter_mode, filter_criteria, \ filter_distance = roi_opt elif len(roi_opt) == 4: filter_type, filter_arg, filter_mode, filter_criteria = roi_opt roi_opt_list[index].append(0) else: - filter_type, filter_arg, filter_mode, filter_criteria, filter_distance = roi_opt + filter_type, filter_arg, filter_mode, filter_criteria, \ filter_distance = roi_opt if filter_type not in ['x_plane', 'y_plane', 'z_plane']: if not os.path.isfile(filter_arg): @@ -210,7 +219,8 @@ def check_overwrite_distance(parser, args): 'It should be MODE CRITERIA DISTANCE.') elif '-'.join([distance[0], distance[1]]) in dict_distance: parser.error('Overwrite distance dictionnary MODE ' - '"{}" has been set multiple times.'.format(distance[0])) + '"{}" has been set multiple times.'.format( + distance[0])) elif distance[0] in MODES and distance[1] in CRITERIA: curr_key = '-'.join([distance[0], distance[1]]) dict_distance[curr_key] = distance[2] @@ -238,7 +248,8 @@ def main(): set_sft_logger_level('WARNING') if overwrite_distance: - logging.debug('Overwrite distance dictionnary {}'.format(overwrite_distance)) + logging.debug('Overwrite distance dictionary {}'.format( + overwrite_distance)) roi_opt_list, only_filtering_list = prepare_filtering_list(parser, args) o_dict = {} @@ -262,7 +273,8 @@ def main(): filter_type, filter_arg, filter_arg_2, \ filter_mode, filter_criteria, filter_distance = roi_opt else: - filter_type, filter_arg, filter_mode, filter_criteria, filter_distance = roi_opt + filter_type, filter_arg, filter_mode, filter_criteria, \ filter_distance = roi_opt curr_dict['filename'] = os.path.abspath(filter_arg) curr_dict['type'] = filter_type @@ -278,7 +290,8 @@ def main(): try: filter_distance = int(curr_dict['distance']) except ValueError: - parser.error('Distance filter {} should is not an 
integer.'.format( + curr_dict['distance'])) is_exclude = False if filter_criteria == 'include' else True @@ -296,7 +309,9 @@ def main(): if args.extract_masks_atlas_roi: atlas_roi_item = atlas_roi_item + 1 nib.Nifti1Image(mask.astype(np.uint16), - img.affine).to_filename('mask_atlas_roi_{}.nii.gz'.format(str(atlas_roi_item))) + img.affine).to_filename( + 'mask_atlas_roi_{}.nii.gz'.format( + str(atlas_roi_item))) filtered_sft, kept_ids = filter_grid_roi(sft, mask, filter_mode, is_exclude, diff --git a/scripts/scil_tractogram_fix_trk.py b/scripts/scil_tractogram_fix_trk.py index f63d48ec40..6fae8c847d 100755 --- a/scripts/scil_tractogram_fix_trk.py +++ b/scripts/scil_tractogram_fix_trk.py @@ -38,6 +38,8 @@ WARNING: This script is still experimental, DSI-Studio and Startrack evolve quickly and results may vary depending on the data itself as well as DSI-studio/Startrack version. + +Formerly: scil_fix_dsi_studio_trk.py """ import argparse diff --git a/scripts/scil_tractogram_flip.py b/scripts/scil_tractogram_flip.py index 0f53bb2120..1f9eb694d0 100755 --- a/scripts/scil_tractogram_flip.py +++ b/scripts/scil_tractogram_flip.py @@ -7,6 +7,8 @@ IMPORTANT: this script should only be used in case of absolute necessity. It's better to fix the real tools than to force flipping streamlines to have them fit in the tools. + +Formerly: scil_flip_streamlines.py """ import argparse diff --git a/scripts/scil_tractogram_math.py b/scripts/scil_tractogram_math.py index c372a1ab39..2a009fbb35 100755 --- a/scripts/scil_tractogram_math.py +++ b/scripts/scil_tractogram_math.py @@ -36,6 +36,8 @@ share the same type of metadata. If this is not the case, use the option --no_metadata to strip the metadata from the output. Or --fake_metadata to initialize dummy metadata in the file missing them. + +Formerly: scil_streamlines_math.py """ import argparse diff --git a/scripts/scil_tractogram_qbx.py b/scripts/scil_tractogram_qbx.py index b19f02429c..f19628cf37 100755 --- a/scripts/scil_tractogram_qbx.py +++ b/scripts/scil_tractogram_qbx.py @@ -2,8 +2,10 @@ # -*- coding: utf-8 -*- """ - Compute clusters using QuickBundlesX and save them separately. - We cannot know the number of clusters in advance. +Compute clusters using QuickBundlesX and save them separately. +We cannot know the number of clusters in advance. + +Formerly: scil_compute_qbx.py """ import argparse diff --git a/scripts/scil_tractogram_register.py b/scripts/scil_tractogram_register.py index cf3f6f2d7c..2f308e0f54 100755 --- a/scripts/scil_tractogram_register.py +++ b/scripts/scil_tractogram_register.py @@ -4,10 +4,12 @@ """ Generate a linear transformation matrix from the registration of 2 tractograms. Typically, this script is run before -scil_apply_transform_to_tractogram.py. +scil_tractogram_apply_transform.py. 
For more informations on how to use the various registration scripts see the doc/tractogram_registration.md readme file + +Formerly: scil_register_tractogram.py """ import argparse diff --git a/scripts/scil_tractogram_remove_invalid.py b/scripts/scil_tractogram_remove_invalid.py index 2244bfd8e1..8ddc8d676c 100755 --- a/scripts/scil_tractogram_remove_invalid.py +++ b/scripts/scil_tractogram_remove_invalid.py @@ -8,6 +8,8 @@ The --cut_invalid option will cut streamlines so that their longest segment are within the bounding box + +Formerly: scil_remove_invalid_streamlines.py """ import argparse @@ -41,7 +43,8 @@ def _build_arg_parser(): p.add_argument('--remove_single_point', action='store_true', help='Consider single point streamlines invalid.') p.add_argument('--remove_overlapping_points', action='store_true', - help='Consider streamlines with overlapping points invalid.') + help='Consider streamlines with overlapping points invalid.' + ) p.add_argument('--threshold', type=float, default=0.001, help='Maximum distance between two points to be considered' ' overlapping [%(default)s mm].') diff --git a/scripts/scil_tractogram_resample.py b/scripts/scil_tractogram_resample.py index 46969bed01..94d1cdb2c7 100755 --- a/scripts/scil_tractogram_resample.py +++ b/scripts/scil_tractogram_resample.py @@ -25,6 +25,8 @@ $ scil_tractogram_resample.py input.trk 1000 output.trk \ --point_wise_std 0.5 --spline 5 10 --keep_invalid_streamlines $ scil_visualize_bundles.py output.trk --local_coloring --width=0.1 + +Formerly: scil_resample_tractogram.py """ import argparse diff --git a/scripts/scil_tractogram_resample_nb_points.py b/scripts/scil_tractogram_resample_nb_points.py index 03baab3a5b..46ad5b7cb4 100755 --- a/scripts/scil_tractogram_resample_nb_points.py +++ b/scripts/scil_tractogram_resample_nb_points.py @@ -4,6 +4,8 @@ """ Script to resample a set of streamlines to either a new number of points per streamline or to a fixed step size. WARNING: data_per_point is not carried. + +Formerly: scil_resample_streamlines.py """ import argparse diff --git a/scripts/scil_tractogram_segment_bundles.py b/scripts/scil_tractogram_segment_bundles.py index a8b98b1a63..b7bb9efbe2 100755 --- a/scripts/scil_tractogram_segment_bundles.py +++ b/scripts/scil_tractogram_segment_bundles.py @@ -8,8 +8,8 @@ AntsRegistrationSyNQuick.sh -d 3 -m MODEL_REF -f SUBJ_REF If you are not sure about the transformation 'direction' you can try -scil_recognize_single_bundle.py (with the -v option), a warning will popup if -the provided transformation is not used correctly. +scil_tractogram_segment_one_bundles.py (with the -v option), a warning will pop up +if the provided transformation is not used correctly. The number of folders inside 'models_directories' will increase the number of runs. Each folder is considered like an atlas and bundles inside will initiate @@ -25,6 +25,8 @@ (size of inputs tractogram (GB) * number of processes) < RAM (GB) This is important because many instances of data structures are initialized in parallel and can lead to a RAM overflow. 
+ +Formally: scil_recognize_multi_bundles.py """ import argparse @@ -94,7 +96,8 @@ def main(): args = parser.parse_args() args.in_models_directories = [os.path.join(args.in_directory, x) for x in os.listdir(args.in_directory) - if os.path.isdir(os.path.join(args.in_directory, x))] + if os.path.isdir(os.path.join( + args.in_directory, x))] assert_inputs_exist(parser, args.in_tractograms + [args.in_config_file, @@ -113,8 +116,9 @@ def main(): file_handler = logging.FileHandler(filename=os.path.join(args.out_dir, 'logfile.txt')) - formatter = logging.Formatter(fmt='%(asctime)s, %(name)s %(levelname)s %(message)s', - datefmt='%H:%M:%S') + formatter = logging.Formatter( + fmt='%(asctime)s, %(name)s %(levelname)s %(message)s', + datefmt='%H:%M:%S') file_handler.setFormatter(formatter) logging.getLogger().setLevel(args.log_level) logging.getLogger().addHandler(file_handler) diff --git a/scripts/scil_tractogram_segment_one_bundles.py b/scripts/scil_tractogram_segment_one_bundles.py index 42559847dc..02abf09b86 100755 --- a/scripts/scil_tractogram_segment_one_bundles.py +++ b/scripts/scil_tractogram_segment_one_bundles.py @@ -12,6 +12,8 @@ the right transformation 'direction' a warning will popup. If there is no warning in both case it means the transformation is very close to identity and both 'direction' will work. + +Formally: scil_recognize_single_bundles.py """ import argparse diff --git a/scripts/scil_tractogram_shuffle.py b/scripts/scil_tractogram_shuffle.py index 12f5b1989d..ca9bee335b 100755 --- a/scripts/scil_tractogram_shuffle.py +++ b/scripts/scil_tractogram_shuffle.py @@ -3,6 +3,8 @@ """ Shuffle the ordering of streamlines. + +Formally: scil_shuffle_streamlines.py """ import argparse diff --git a/scripts/scil_tractogram_smooth.py b/scripts/scil_tractogram_smooth.py index 8bc83f7377..1c4b39d53d 100755 --- a/scripts/scil_tractogram_smooth.py +++ b/scripts/scil_tractogram_smooth.py @@ -21,6 +21,8 @@ - too low of a sigma (e.g: 1) with a lot of control points (e.g: 15) will create crazy streamlines that could end up out of the bounding box. - data_per_point will be lost. + +Formally: scil_smooth_streamlines.py """ import argparse @@ -51,8 +53,8 @@ def _build_arg_parser(): sub_p = p.add_mutually_exclusive_group(required=True) sub_p.add_argument('--gaussian', metavar='SIGMA', type=int, - help='Sigma for smoothing. Use the value of surronding\n' - 'X,Y,Z points on the streamline to blur the' + help='Sigma for smoothing. Use the value of surrounding' + '\nX,Y,Z points on the streamline to blur the' ' streamlines.\nA good sigma choice would be ' 'around 5.') sub_p.add_argument('--spline', nargs=2, metavar=('SIGMA', 'NB_CTRL_POINT'), @@ -95,8 +97,9 @@ def main(): smoothed_streamlines.append(compress_streamlines(tmp_streamlines, args.error_rate)) - smoothed_sft = StatefulTractogram.from_sft(smoothed_streamlines, sft, - data_per_streamline=sft.data_per_streamline) + smoothed_sft = StatefulTractogram.from_sft( + smoothed_streamlines, sft, + data_per_streamline=sft.data_per_streamline) save_tractogram(smoothed_sft, args.out_tractogram) diff --git a/scripts/scil_tractogram_split.py b/scripts/scil_tractogram_split.py index d5cb8fed4b..5cc4ca4ed8 100755 --- a/scripts/scil_tractogram_split.py +++ b/scripts/scil_tractogram_split.py @@ -10,6 +10,8 @@ - sequentially (the first n/nb_chunks streamlines in the first chunk and so on). - randomly, but per Quickbundles clusters.
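A minimal sketch of the sequential split described above, assuming plain Python lists rather than the script's actual tractogram objects:

    import numpy as np

    streamlines = [np.zeros((10, 3)) for _ in range(100)]  # placeholder data
    nb_chunks = 4
    # First n/nb_chunks streamlines in the first chunk, and so on.
    index_groups = np.array_split(np.arange(len(streamlines)), nb_chunks)
    chunks = [[streamlines[i] for i in idx] for idx in index_groups]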
+ +Formally: scil_split_tractogram.py """ import argparse import logging diff --git a/scripts/scil_tractogram_uniformize_endpoints.py b/scripts/scil_tractogram_uniformize_endpoints.py index 613f8e5f08..8a320d355a 100755 --- a/scripts/scil_tractogram_uniformize_endpoints.py +++ b/scripts/scil_tractogram_uniformize_endpoints.py @@ -14,6 +14,8 @@ The --target option will use the barycenter of the target mask to define the axis. The target mask can be a binary mask or an atlas. If an atlas is used, labels are expected in the form of --target atlas.nii.gz 2 3 5:7. + +Formally: scil_uniformize_streamlines_endpoints.py """ import argparse @@ -45,8 +47,8 @@ def _build_arg_parser(): method = p.add_mutually_exclusive_group(required=True) method.add_argument('--axis', choices=['x', 'y', 'z'], - help='Match endpoints of the streamlines along this axis.' - '\nSUGGESTION: Commissural = x, Association = y, ' + help='Match endpoints of the streamlines along this ' + 'axis.\nSUGGESTION: Commissural = x, Association = y, ' 'Projection = z') method.add_argument('--auto', action='store_true', help='Match endpoints of the streamlines along an ' @@ -56,8 +58,9 @@ def _build_arg_parser(): 'to a reference unique streamline (centroid).') method.add_argument('--target_roi', nargs='+', help='Provide a target ROI and the labels to use.\n' - 'Align heads to be closest to the mask barycenter.\n' - 'If no labels are provided, all labels will be used.') + 'Align heads to be closest to the mask barycenter' + '.\nIf no labels are provided, all labels will be' + ' used.') p.add_argument('--swap', action='store_true', help='Swap head <-> tail convention. ' 'Can be useful when the reference is not in RAS.') diff --git a/scripts/scil_visualize_seeds.py b/scripts/scil_visualize_seeds.py index bba74ab746..93fb20d7d6 100755 --- a/scripts/scil_visualize_seeds.py +++ b/scripts/scil_visualize_seeds.py @@ -6,8 +6,8 @@ When tractography was run, each streamline produced by the tracking algorithm saved its seeding point (its origin). -The tractogram must have been generated from scil_compute_local/pft_tracking.py -with the --save_seeds option. +The tractogram must have been generated from scil_tracking_local or +scil_tracking_pft with the --save_seeds option. """ import argparse diff --git a/scripts/scil_volume_apply_transform.py b/scripts/scil_volume_apply_transform.py index dc46b40adc..dfc391837c 100755 --- a/scripts/scil_volume_apply_transform.py +++ b/scripts/scil_volume_apply_transform.py @@ -6,6 +6,8 @@ For more information on how to use the registration script, follow this link: https://scilpy.readthedocs.io/en/latest/documentation/tractogram_registration.html + +Formally: scil_apply_transform_to_image.py. """ import argparse diff --git a/scripts/scil_volume_count_non_zero_voxels.py b/scripts/scil_volume_count_non_zero_voxels.py index ebf252ca24..7430c42b01 100755 --- a/scripts/scil_volume_count_non_zero_voxels.py +++ b/scripts/scil_volume_count_non_zero_voxels.py @@ -8,6 +8,8 @@ (or more) dimension to one voxel, and then find non-zero voxels over this. This means that if there is at least one non-zero voxel in the 4th dimension, this voxel of the 3D volume will be considered as non-zero. 
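The 4D-to-3D reduction described above amounts to a logical OR along the last axis; a hedged numpy sketch (not the script's own code):

    import numpy as np

    data = np.random.rand(5, 5, 5, 8) > 0.9  # placeholder 4D volume
    # A voxel counts as non-zero if any value along the 4th dimension is.
    mask_3d = np.any(data != 0, axis=-1)
    print(np.count_nonzero(mask_3d))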
+ +Formally: scil_count_non_zero_voxels.py """ import argparse diff --git a/scripts/scil_volume_crop.py b/scripts/scil_volume_crop.py index c1c6f381e6..a93cf14ee4 100755 --- a/scripts/scil_volume_crop.py +++ b/scripts/scil_volume_crop.py @@ -9,6 +9,8 @@ Warning: This works well on masked images (like with FSL-Bet) volumes since it's looking for non-zero data. Therefore, you should validate the results on other types of images that haven't been masked. + +Formally: scil_crop_volume.py """ import argparse diff --git a/scripts/scil_volume_flip.py b/scripts/scil_volume_flip.py index e0bb96f891..49562d378b 100755 --- a/scripts/scil_volume_flip.py +++ b/scripts/scil_volume_flip.py @@ -2,6 +2,8 @@ """ Flip the volume according to the specified axis. + +Formally: scil_flip_volume.py """ import argparse diff --git a/scripts/scil_volume_math.py b/scripts/scil_volume_math.py index 508380a66e..c130d0d7f6 100755 --- a/scripts/scil_volume_math.py +++ b/scripts/scil_volume_math.py @@ -10,7 +10,9 @@ Some operations such as multiplication or addition accept float value as parameters instead of images. -> scil_image_math.py multiplication img.nii.gz 10 mult_10.nii.gz +> scil_volume_math.py multiplication img.nii.gz 10 mult_10.nii.gz + +Formally: scil_image_math.py """ import argparse diff --git a/scripts/scil_volume_remove_outliers_ransac.py b/scripts/scil_volume_remove_outliers_ransac.py index 3fb5ac9400..c6dafcc24e 100755 --- a/scripts/scil_volume_remove_outliers_ransac.py +++ b/scripts/scil_volume_remove_outliers_ransac.py @@ -6,6 +6,8 @@ The RANSAC algorithm parameters are sensitive to the input data. NOTE: Current default parameters are tuned for ad/md/rd images only. + +Formally: scil_remove_outliers_ransac.py """ import argparse diff --git a/scripts/scil_volume_resample.py b/scripts/scil_volume_resample.py index 88ac7d0b4e..61ded50fc7 100755 --- a/scripts/scil_volume_resample.py +++ b/scripts/scil_volume_resample.py @@ -4,6 +4,8 @@ """ Script to resample a dataset to match the resolution of another reference dataset or to the resolution specified as in argument. + +Formally: scil_resample_volume.py """ import argparse diff --git a/scripts/scil_volume_reshape_to_reference.py b/scripts/scil_volume_reshape_to_reference.py index 6f773fef55..fc7e220425 100755 --- a/scripts/scil_volume_reshape_to_reference.py +++ b/scripts/scil_volume_reshape_to_reference.py @@ -6,9 +6,10 @@ This script can be used to align freesurfer/civet output, as .mgz, to the original input image. 
- ->>> scil_reshape_to_reference.py wmparc.mgz t1.nii.gz wmparc_t1.nii.gz \\ +>>> scil_volume_reshape_to_reference.py wmparc.mgz t1.nii.gz wmparc_t1.nii.gz\\ --interpolation nearest + +Formally: scil_reshape_to_reference.py """ import argparse From 4ce1d4c77ab34826c701ab742c0659789bcf27f6 Mon Sep 17 00:00:00 2001 From: AntoineTheb Date: Thu, 14 Dec 2023 13:25:20 -0500 Subject: [PATCH 15/63] FIX: gradients --- scripts/legacy/test_legacy_scripts.py | 2 -- scripts/scil_NODDI_maps.py | 2 +- scripts/scil_freewater_maps.py | 2 +- scripts/scil_tractogram_commit.py | 2 +- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/scripts/legacy/test_legacy_scripts.py b/scripts/legacy/test_legacy_scripts.py index ad3e7300dc..287612bcef 100644 --- a/scripts/legacy/test_legacy_scripts.py +++ b/scripts/legacy/test_legacy_scripts.py @@ -67,8 +67,6 @@ "scil_concatenate_dwi.py", "scil_connectivity_math.py", "scil_convert_fdf.py", - "scil_convert_gradients_fsl_to_mrtrix.py", - "scil_convert_gradients_mrtrix_to_fsl.py", "scil_convert_json_to_xlsx.py", "scil_convert_rgb.py", "scil_convert_sh_basis.py", diff --git a/scripts/scil_NODDI_maps.py b/scripts/scil_NODDI_maps.py index 3c4999123b..8853dab0d9 100755 --- a/scripts/scil_NODDI_maps.py +++ b/scripts/scil_NODDI_maps.py @@ -110,7 +110,7 @@ def main(): # Generage a scheme file from the bvals and bvecs files tmp_dir = tempfile.TemporaryDirectory() - tmp_scheme_filename = os.path.join(tmp_dir.name, 'gradients.scheme') + tmp_scheme_filename = os.path.join(tmp_dir.name, 'gradients.b') tmp_bval_filename = os.path.join(tmp_dir.name, 'bval') bvals, _ = read_bvals_bvecs(args.in_bval, args.in_bvec) shells_centroids, indices_shells = identify_shells(bvals, diff --git a/scripts/scil_freewater_maps.py b/scripts/scil_freewater_maps.py index e97da79719..80b7d331ad 100755 --- a/scripts/scil_freewater_maps.py +++ b/scripts/scil_freewater_maps.py @@ -118,7 +118,7 @@ def main(): # Generage a scheme file from the bvals and bvecs files tmp_dir = tempfile.TemporaryDirectory() - tmp_scheme_filename = os.path.join(tmp_dir.name, 'gradients.scheme') + tmp_scheme_filename = os.path.join(tmp_dir.name, 'gradients.b') tmp_bval_filename = os.path.join(tmp_dir.name, 'bval') bvals, _ = read_bvals_bvecs(args.in_bval, args.in_bvec) shells_centroids, indices_shells = identify_shells(bvals, diff --git a/scripts/scil_tractogram_commit.py b/scripts/scil_tractogram_commit.py index 6e6bde32ae..fbfbaebdf9 100755 --- a/scripts/scil_tractogram_commit.py +++ b/scripts/scil_tractogram_commit.py @@ -376,7 +376,7 @@ def main(): args.in_tractogram = tmp_tractogram_filename # Writing the scheme file with proper shells - tmp_scheme_filename = os.path.join(tmp_dir.name, 'gradients.scheme') + tmp_scheme_filename = os.path.join(tmp_dir.name, 'gradients.b') tmp_bval_filename = os.path.join(tmp_dir.name, 'bval') bvals, _ = read_bvals_bvecs(args.in_bval, args.in_bvec) shells_centroids, indices_shells = identify_shells(bvals, args.b_thr, From 086839ffe4d816e2bed1141b1c3995f7ade0678d Mon Sep 17 00:00:00 2001 From: frheault Date: Thu, 14 Dec 2023 13:37:19 -0500 Subject: [PATCH 16/63] Add back legacy for warning --- .../scil_json_convert_entries_to_xlsx.py | 21 +++++++++++++++++++ scripts/legacy/scil_json_harmonize_entries.py | 21 +++++++++++++++++++ scripts/legacy/scil_json_merge_entries.py | 21 +++++++++++++++++++ 3 files changed, 63 insertions(+) create mode 100755 scripts/legacy/scil_json_convert_entries_to_xlsx.py create mode 100755 scripts/legacy/scil_json_harmonize_entries.py create mode 100755 
scripts/legacy/scil_json_merge_entries.py diff --git a/scripts/legacy/scil_json_convert_entries_to_xlsx.py b/scripts/legacy/scil_json_convert_entries_to_xlsx.py new file mode 100755 index 0000000000..871ad8abf6 --- /dev/null +++ b/scripts/legacy/scil_json_convert_entries_to_xlsx.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_json_convert_entries_to_xlsx import main as new_main + + +DEPRECATION_MSG = """ +This script has been renamed scil_json_convert_entries_to_xlsx.py. +Please change your existing pipelines accordingly. +""" + + +@deprecate_script("scil_json_convert_entries_to_xlsx.py", + DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() diff --git a/scripts/legacy/scil_json_harmonize_entries.py b/scripts/legacy/scil_json_harmonize_entries.py new file mode 100755 index 0000000000..07ac6ab7dc --- /dev/null +++ b/scripts/legacy/scil_json_harmonize_entries.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_json_harmonize_entries import main as new_main + + +DEPRECATION_MSG = """ +This script has been renamed scil_json_harmonize_entries.py. +Please change your existing pipelines accordingly. +""" + + +@deprecate_script("scil_json_harmonize_entries.py", + DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() diff --git a/scripts/legacy/scil_json_merge_entries.py b/scripts/legacy/scil_json_merge_entries.py new file mode 100755 index 0000000000..bdc5efa911 --- /dev/null +++ b/scripts/legacy/scil_json_merge_entries.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_json_merge_entries import main as new_main + + +DEPRECATION_MSG = """ +This script has been renamed scil_json_merge_entries.py. +Please change your existing pipelines accordingly. 
+""" + + +@deprecate_script("scil_json_merge_entries.py", + DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() From 173f29dd27ae1e6a5c1ca4c05e15e0b53e436d87 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 14 Dec 2023 13:47:03 -0500 Subject: [PATCH 17/63] 2nd round of helps + pep8 --- scripts/scil_connectivity_compute_matrices.py | 9 +++-- ...l_connectivity_hdf5_average_density_map.py | 13 +++++--- scripts/scil_connectivity_normalize.py | 8 +++-- scripts/scil_connectivity_print_filenames.py | 3 +- scripts/scil_connectivity_reorder_rois.py | 2 +- scripts/scil_denoising_nlmeans.py | 2 ++ scripts/scil_dki_metrics.py | 6 ++-- scripts/scil_dwi_prepare_eddy_command.py | 11 ++++--- scripts/scil_dwi_prepare_topup_command.py | 2 ++ scripts/scil_json_convert_to_xlsx.py | 5 ++- scripts/scil_json_harmonize.py | 2 ++ scripts/scil_json_merge.py | 5 ++- ...scil_tractogram_apply_transform_to_hdf5.py | 33 +++++++++++-------- .../scil_tractogram_assign_custom_color.py | 2 ++ .../scil_tractogram_assign_uniform_color.py | 2 ++ scripts/scil_tractogram_commit.py | 3 +- .../scil_tractogram_convert_hdf5_to_trk.py | 8 +++-- scripts/scil_tractogram_seed_density_map.py | 7 ++-- ...togram_segment_bundles_for_connectivity.py | 6 ++-- scripts/scil_visualize_connectivity.py | 20 ++++++----- scripts/scil_volume_stats_in_labels.py | 2 ++ 21 files changed, 99 insertions(+), 52 deletions(-) diff --git a/scripts/scil_connectivity_compute_matrices.py b/scripts/scil_connectivity_compute_matrices.py index 79412a2ed0..a858cada48 100755 --- a/scripts/scil_connectivity_compute_matrices.py +++ b/scripts/scil_connectivity_compute_matrices.py @@ -3,7 +3,8 @@ """ This script computes a variety of measures in the form of connectivity -matrices. This script is made to follow scil_decompose_connectivity and +matrices. This script is made to follow +scil_tractogram_segment_bundles_for_connectivity.py and uses the same labels list as input. The script expects a folder containing all relevants bundles following the @@ -110,7 +111,8 @@ def _processing_wrapper(args): # Precompute to save one transformation, insert later if 'length' in measures_to_compute: streamlines_copy = list(streamlines) - # scil_decompose_connectivity.py requires isotropic voxels + # scil_tractogram_segment_bundles_for_connectivity.py requires + # isotropic voxels mean_length = np.average(length(streamlines_copy))*voxel_sizes[0] # If density is not required, do not compute it @@ -229,7 +231,8 @@ def _build_arg_parser(): formatter_class=argparse.RawTextHelpFormatter,) p.add_argument('in_hdf5', help='Input filename for the hdf5 container (.h5).\n' - 'Obtained from scil_decompose_connectivity.py.') + 'Obtained from ' + 'scil_tractogram_segment_bundles_for_connectivity.py.') p.add_argument('in_labels', help='Labels file name (nifti).\n' 'This generates a NxN connectivity matrix.') diff --git a/scripts/scil_connectivity_hdf5_average_density_map.py b/scripts/scil_connectivity_hdf5_average_density_map.py index bc6f63312e..c53bd97b0c 100755 --- a/scripts/scil_connectivity_hdf5_average_density_map.py +++ b/scripts/scil_connectivity_hdf5_average_density_map.py @@ -3,9 +3,9 @@ """ Compute a density map for each connection from a hdf5 file. -Typically use after scil_decompose_connectivity.py in order to obtain the -average density map of each connection to allow the use of --similarity -in scil_connectivity_compute_matrices.py. 
+Typically use after scil_tractogram_segment_bundles_for_connectivity.py in +order to obtain the average density map of each connection to allow the use +of --similarity in scil_connectivity_compute_matrices.py. This script is parallelized, but will run much slower on non-SSD if too many processes are used. The output is a directory containing the thousands of @@ -15,6 +15,8 @@ |-- LABEL1_LABEL2.nii.gz |-- [...] |-- LABEL90_LABEL90.nii.gz + +Formally: scil_compute_hdf5_average_density_map.py """ import argparse @@ -40,7 +42,7 @@ def _build_arg_parser(): description=__doc__, formatter_class=argparse.RawTextHelpFormatter) p.add_argument('in_hdf5', nargs='+', help='List of HDF5 filenames (.h5) from ' - 'scil_decompose_connectivity.py.') + 'scil_tractogram_segment_bundles_for_connectivity.py.') p.add_argument('out_dir', help='Path of the output directory.') @@ -69,7 +71,8 @@ def _average_wrapper(args): dimensions)): raise IOError('{} do not have a compatible header'.format( hdf5_filename)) - # scil_decompose_connectivity.py saves the streamlines in VOX/CORNER + # scil_tractogram_segment_bundles_for_connectivity.py saves the + # streamlines in VOX/CORNER streamlines = reconstruct_streamlines_from_hdf5(hdf5_file, key) if len(streamlines) == 0: continue diff --git a/scripts/scil_connectivity_normalize.py b/scripts/scil_connectivity_normalize.py index 562e971f15..162363063a 100755 --- a/scripts/scil_connectivity_normalize.py +++ b/scripts/scil_connectivity_normalize.py @@ -2,7 +2,8 @@ # -*- coding: utf-8 -*- """ -Normalize a connectivity matrix coming from scil_decompose_connectivity.py. +Normalize a connectivity matrix coming from +scil_tractogram_segment_bundles_for_connectivity.py. 3 categories of normalization are available: -- Edge attributes - length: Multiply each edge by the average bundle length. @@ -31,7 +32,7 @@ - log_10: Apply a base 10 logarithm to all edges weight The volume and length matrix should come from the -scil_decompose_connectivity.py script. +scil_tractogram_segment_bundles_for_connectivity.py script. A review of the type of normalization is available in: Colon-Perez, Luis M., et al. 
"Dimensionless, scale-invariant, edge weight @@ -137,7 +138,8 @@ def main(): # Node-wise computation are necessary for this type of normalize # Parcel volume and surface normalization require the atlas - # This script should be used directly after scil_decompose_connectivity.py + # This script should be used directly after + # scil_tractogram_segment_bundles_for_connectivity.py if args.parcel_volume or args.parcel_surface: atlas_img = nib.load(atlas_filepath) labels_list = np.loadtxt(labels_filepath) diff --git a/scripts/scil_connectivity_print_filenames.py b/scripts/scil_connectivity_print_filenames.py index 7bcb35ad0f..c18af72e67 100755 --- a/scripts/scil_connectivity_print_filenames.py +++ b/scripts/scil_connectivity_print_filenames.py @@ -61,7 +61,8 @@ def main(): in_label = labels_list[pos_1] out_label = labels_list[pos_2] - # scil_decompose_connectivity.py only save the lower triangular files + # scil_tractogram_segment_bundles_for_connectivity.py only save the + # lower triangular files if out_label < in_label: continue text_file.write('{}_{}.trk\n'.format(in_label, out_label)) diff --git a/scripts/scil_connectivity_reorder_rois.py b/scripts/scil_connectivity_reorder_rois.py index 677e5acf44..3319df18bc 100755 --- a/scripts/scil_connectivity_reorder_rois.py +++ b/scripts/scil_connectivity_reorder_rois.py @@ -10,7 +10,7 @@ The values refer to the coordinates (starting at 0) in the matrix, but if the --labels_list parameter is used, the values will refer to the label which will be converted to the appropriate coordinates. This file must be the same as the -one provided to the scil_decompose_connectivity.py +one provided to the scil_tractogram_segment_bundles_for_connectivity.py. To subsequently use scil_visualize_connectivity.py with a lookup table, you must use a label-based reording json and use --labels_list. diff --git a/scripts/scil_denoising_nlmeans.py b/scripts/scil_denoising_nlmeans.py index 2be6130ae8..e48cd9e205 100755 --- a/scripts/scil_denoising_nlmeans.py +++ b/scripts/scil_denoising_nlmeans.py @@ -3,6 +3,8 @@ """ Script to denoise a dataset with the Non Local Means algorithm. + +Formally: scil_run_nlmeans.py """ import argparse diff --git a/scripts/scil_dki_metrics.py b/scripts/scil_dki_metrics.py index ea937f07ac..161af2e952 100755 --- a/scripts/scil_dki_metrics.py +++ b/scripts/scil_dki_metrics.py @@ -9,9 +9,9 @@ Since the diffusion kurtosis model involves the estimation of a large number of parameters and since the non-Gaussian components of the diffusion signal are more sensitive to artefacts, you should really denoise your DWI volume -before using this DKI script (e.g. scil_run_nlmeans.py). Moreover, to remove -biases due to fiber dispersion, fiber crossings and other mesoscopic properties -of the underlying tissue, MSDKI does a powder-average of DWI for all +before using this DKI script (e.g. scil_denoising_nlmeans.py). Moreover, to +remove biases due to fiber dispersion, fiber crossings and other mesoscopic +properties of the underlying tissue, MSDKI does a powder-average of DWI for all directions, thus removing the orientational dependencies and creating an alternative mean kurtosis map. diff --git a/scripts/scil_dwi_prepare_eddy_command.py b/scripts/scil_dwi_prepare_eddy_command.py index d7fa42d2e0..10efb42e52 100755 --- a/scripts/scil_dwi_prepare_eddy_command.py +++ b/scripts/scil_dwi_prepare_eddy_command.py @@ -6,6 +6,8 @@ b-vectors should be merged together using scil_dwi_concatenate.py. 
If using topup prior to calling this script, images should be concatenated in the same order as the b0s used with prepare_topup. + +Formally: scil_prepare_eddy_command.py """ import argparse @@ -18,10 +20,9 @@ from scilpy.io.utils import (add_overwrite_arg, add_verbose_arg, assert_fsl_options_exist, assert_inputs_exist) -from scilpy.preprocessing.distortion_correction import (create_acqparams, - create_index, - create_multi_topup_index, - create_non_zero_norm_bvecs) +from scilpy.preprocessing.distortion_correction import \ + (create_acqparams, create_index, create_multi_topup_index, + create_non_zero_norm_bvecs) def _build_arg_parser(): @@ -51,7 +52,7 @@ def _build_arg_parser(): help='Topup output name. ' + 'If given, apply topup during eddy.\n' + 'Should be the same as --out_prefix from ' + - 'scil_prepare_topup_command.py.') + 'scil_dwi_prepare_topup_command.py.') p.add_argument('--topup_params', default='', help='Parameters file (typically named acqparams) ' diff --git a/scripts/scil_dwi_prepare_topup_command.py b/scripts/scil_dwi_prepare_topup_command.py index 9b6138102d..64beb81eb7 100755 --- a/scripts/scil_dwi_prepare_topup_command.py +++ b/scripts/scil_dwi_prepare_topup_command.py @@ -4,6 +4,8 @@ """ Prepare a typical command for topup and create the necessary files. The reversed b0 must be in a different file. + +Formally: scil_prepare_topup_command.py """ import argparse diff --git a/scripts/scil_json_convert_to_xlsx.py b/scripts/scil_json_convert_to_xlsx.py index 2c48e04ef6..967fa49e96 100755 --- a/scripts/scil_json_convert_to_xlsx.py +++ b/scripts/scil_json_convert_to_xlsx.py @@ -1,8 +1,11 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -""" Convert a final aggregated json file to an Excel spreadsheet. Typically +""" +Convert a final aggregated json file to an Excel spreadsheet. Typically used during the tractometry pipeline. + +Formally: scil_convert_json_to_xlsx.py """ import argparse diff --git a/scripts/scil_json_harmonize.py b/scripts/scil_json_harmonize.py index 03300f3c5e..5fdb94ca1c 100755 --- a/scripts/scil_json_harmonize.py +++ b/scripts/scil_json_harmonize.py @@ -7,6 +7,8 @@ This is use only (for now) in Aggregate_All_* portion of tractometry-flow, to counter the problem of missing bundles/metrics/lesions between subjects. + +Formally: scil_harmonize_json.py """ import argparse diff --git a/scripts/scil_json_merge.py b/scripts/scil_json_merge.py index 81db336436..9051e8a905 100755 --- a/scripts/scil_json_merge.py +++ b/scripts/scil_json_merge.py @@ -1,9 +1,12 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -""" Merge multiple json file into a single one. +""" +Merge multiple json file into a single one. the --keep_separate option will add an entry for each file, the basename will become the key. 
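A hedged sketch of the --keep_separate behaviour described above, with hypothetical input names (not the script's implementation):

    import json
    import os

    merged = {}
    for path in ['sub-01.json', 'sub-02.json']:  # hypothetical inputs
        key = os.path.splitext(os.path.basename(path))[0]
        with open(path) as f:
            merged[key] = json.load(f)  # one top-level entry per input file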
+ +Formally: scil_merge_json.py """ import argparse diff --git a/scripts/scil_tractogram_apply_transform_to_hdf5.py b/scripts/scil_tractogram_apply_transform_to_hdf5.py index 8966061f15..6b3d8cc136 100755 --- a/scripts/scil_tractogram_apply_transform_to_hdf5.py +++ b/scripts/scil_tractogram_apply_transform_to_hdf5.py @@ -8,6 +8,7 @@ For more information on how to use the registration script, follow this link: https://scilpy.readthedocs.io/en/latest/documentation/tractogram_registration.html +Formally: scil_apply_transform_to_hdf5.py """ import argparse @@ -103,25 +104,27 @@ def main(): moving_sft.data_per_streamline[dps_key] \ = in_hdf5_file[key][dps_key] - new_sft = transform_warp_sft(moving_sft, transfo, target_img, - inverse=args.inverse, - deformation_data=deformation_data, - reverse_op=args.reverse_operation, - remove_invalid=not args.cut_invalid, - cut_invalid=args.cut_invalid) + new_sft = transform_warp_sft( + moving_sft, transfo, target_img, + inverse=args.inverse, + deformation_data=deformation_data, + reverse_op=args.reverse_operation, + remove_invalid=not args.cut_invalid, + cut_invalid=args.cut_invalid) new_sft.to_vox() new_sft.to_corner() - affine, dimensions, voxel_sizes, voxel_order = get_reference_info( - target_img) + affine, dimensions, voxel_sizes, voxel_order = \ + get_reference_info(target_img) out_hdf5_file.attrs['affine'] = affine out_hdf5_file.attrs['dimensions'] = dimensions out_hdf5_file.attrs['voxel_sizes'] = voxel_sizes out_hdf5_file.attrs['voxel_order'] = voxel_order group = out_hdf5_file[key] - group.create_dataset('data', - data=new_sft.streamlines._data.astype(np.float32)) + group.create_dataset( + 'data', + data=new_sft.streamlines._data.astype(np.float32)) group.create_dataset('offsets', data=new_sft.streamlines._offsets) group.create_dataset('lengths', @@ -130,11 +133,13 @@ def main(): if dps_key not in ['data', 'offsets', 'lengths']: if in_hdf5_file[key][dps_key].shape \ == in_hdf5_file[key]['offsets']: - group.create_dataset(dps_key, - data=new_sft.data_per_streamline[dps_key]) + group.create_dataset( + dps_key, + data=new_sft.data_per_streamline[dps_key]) else: - group.create_dataset(dps_key, - data=in_hdf5_file[key][dps_key]) + group.create_dataset( + dps_key, + data=in_hdf5_file[key][dps_key]) if __name__ == "__main__": diff --git a/scripts/scil_tractogram_assign_custom_color.py b/scripts/scil_tractogram_assign_custom_color.py index a7e6d17b29..230c95a233 100755 --- a/scripts/scil_tractogram_assign_custom_color.py +++ b/scripts/scil_tractogram_assign_custom_color.py @@ -34,6 +34,8 @@ The script can also be used to color streamlines according to their length using the --along_profile option. The streamlines must be uniformized. + +Formally: scil_assign_custom_color_to_tractogram.py """ import argparse diff --git a/scripts/scil_tractogram_assign_uniform_color.py b/scripts/scil_tractogram_assign_uniform_color.py index 5119405024..092bb01811 100755 --- a/scripts/scil_tractogram_assign_uniform_color.py +++ b/scripts/scil_tractogram_assign_uniform_color.py @@ -10,6 +10,8 @@ If called with .tck, the output will always be .trk, because data_per_point has no equivalent in tck file. + +Formally: scil_assign_uniform_color_to_tractograms.py """ import argparse diff --git a/scripts/scil_tractogram_commit.py b/scripts/scil_tractogram_commit.py index 48dccdd5bb..dfe567daae 100755 --- a/scripts/scil_tractogram_commit.py +++ b/scripts/scil_tractogram_commit.py @@ -45,7 +45,8 @@ signal. Streamlines with 0 weight are essentially not necessary according to COMMIT. 
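As a hedged illustration of the paragraph above (the weights file name is hypothetical, not a documented output of this script), one could keep only the streamlines COMMIT deems necessary like so:

    import numpy as np

    weights = np.loadtxt('streamline_weights.txt')  # hypothetical weights file
    keep_indices = np.flatnonzero(weights > 0)      # drop zero-weight streamlines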
-COMMIT2 is available only for HDF5 data from scil_decompose_connectivity.py and +COMMIT2 is available only for HDF5 data from +scil_tractogram_segment_bundles_for_connectivity.py and with the --ball_stick option. Use the --commit2 option to activite it, slightly longer computation time. This wrapper offers a simplify way to call COMMIT, but does not allow to use (or fine-tune) every parameters. If you want to use diff --git a/scripts/scil_tractogram_convert_hdf5_to_trk.py b/scripts/scil_tractogram_convert_hdf5_to_trk.py index a86c143628..b36b7ed97b 100755 --- a/scripts/scil_tractogram_convert_hdf5_to_trk.py +++ b/scripts/scil_tractogram_convert_hdf5_to_trk.py @@ -2,7 +2,8 @@ # -*- coding: utf-8 -*- """ -Save individual connection of an hd5f from scil_decompose_connectivity.py. +Save individual connections of an hdf5 file from +scil_tractogram_segment_bundles_for_connectivity.py. Useful for quality control and visual inspections. It can either save all connections, individual connections specified with @@ -17,6 +18,8 @@ |-- LABEL1_LABEL2.trk |-- [...] |-- LABEL90_LABEL90.trk + +Formally: scil_save_connections_from_hdf5.py """ import argparse @@ -111,7 +114,8 @@ def main(): if args.include_dps: for dps_key in hdf5_file[key].keys(): if dps_key not in ['data', 'offsets', 'lengths']: - sft.data_per_streamline[dps_key] = hdf5_file[key][dps_key] + sft.data_per_streamline[dps_key] = \ + hdf5_file[key][dps_key] save_tractogram(sft, '{}.trk' .format(os.path.join(args.out_dir, key))) diff --git a/scripts/scil_tractogram_seed_density_map.py b/scripts/scil_tractogram_seed_density_map.py index 71e15d04b2..4dbdec5574 100755 --- a/scripts/scil_tractogram_seed_density_map.py +++ b/scripts/scil_tractogram_seed_density_map.py @@ -3,6 +3,8 @@ """ Compute a density map of seeds saved in .trk file. + +Formally: scil_compute_seed_density_map.py """ import argparse @@ -32,8 +34,9 @@ def _build_arg_parser(): metavar='FIXED_VALUE', type=int, nargs='?', const=1, help='If set, will store the same value for all intersected' ' voxels, creating a binary map.\n' - 'When set without a value, 1 is used (and dtype uint8).\n' - 'If a value is given, will be used as the stored value.') + 'When set without a value, 1 is used (and dtype ' + 'uint8).\nIf a value is given, will be used as the ' + 'stored value.') add_overwrite_arg(p) add_bbox_arg(p) diff --git a/scripts/scil_tractogram_segment_bundles_for_connectivity.py b/scripts/scil_tractogram_segment_bundles_for_connectivity.py index df99db6a79..f0e9bdad18 100755 --- a/scripts/scil_tractogram_segment_bundles_for_connectivity.py +++ b/scripts/scil_tractogram_segment_bundles_for_connectivity.py @@ -26,6 +26,8 @@ - 15 minutes without post-processing, only saving final bundles. - 30 minutes with full post-processing, only saving final bundles. - 60 minutes with full post-processing, saving all possible files.
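For readers following the 'data'/'offsets'/'lengths' datasets used in the hdf5 hunks above, a minimal sketch of how streamlines are typically rebuilt from that flat layout (placeholder arrays, not the scilpy helper):

    import numpy as np

    data = np.zeros((10, 3), dtype=np.float32)  # all points, concatenated
    offsets = np.array([0, 4, 7])   # start index of each streamline
    lengths = np.array([4, 3, 3])   # number of points per streamline
    streamlines = [data[o:o + l] for o, l in zip(offsets, lengths)]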
+ +Formally: scil_decompose_connectivity.py """ import argparse @@ -286,8 +288,8 @@ def main(): if sft is None: sft = load_tractogram_with_reference(parser, args, in_tractogram) if not is_header_compatible(sft, img_labels): - raise IOError('{} and {}do not have a compatible header'.format( - in_tractogram, args.in_labels)) + raise IOError('{} and {} do not have a compatible ' + 'header'.format(in_tractogram, args.in_labels)) else: sft += load_tractogram_with_reference(parser, args, in_tractogram) diff --git a/scripts/scil_visualize_connectivity.py b/scripts/scil_visualize_connectivity.py index 4482f0c3f3..a793caa247 100755 --- a/scripts/scil_visualize_connectivity.py +++ b/scripts/scil_visualize_connectivity.py @@ -3,7 +3,7 @@ """ Script to display a connectivity matrix and adjust the desired visualization. -Made to work with scil_decompose_connectivity.py and +Made to work with scil_tractogram_segment_bundles_for_connectivity.py and scil_connectivity_reorder_rois.py. This script can either display the axis labels as: @@ -22,8 +22,9 @@ thickness of the line represent the 'size/intensity', the greater the value is the thicker the line will be. In order to hide the low values, two options are available: -- Angle threshold + alpha, any connections with a small angle on the chord chart - will be slightly transparent to increase the focus on bigger connections. +- Angle threshold + alpha, any connections with a small angle on the chord + chart will be slightly transparent to increase the focus on bigger + connections. - Percentile, hide any connections with a value below that percentile """ @@ -82,7 +83,8 @@ def _build_arg_parser(): g2.add_argument('--legend_min_max', nargs=2, metavar=('MIN', 'MAX'), type=float, default=None, help='Manually define the min/max of the legend.') - g2.add_argument('--write_values', nargs=2, metavar=('FONT_SIZE', 'DECIMAL'), + g2.add_argument('--write_values', nargs=2, metavar=('FONT_SIZE', + 'DECIMAL'), default=None, type=int, help='Write the values at the center of each node.\n' 'The font size and the rouding parameters can be ' @@ -90,7 +92,8 @@ def _build_arg_parser(): histo = p.add_argument_group(title='Histogram options') histo.add_argument('--histogram', metavar='FILENAME', - help='Compute and display/save an histogram of weights.') + help='Compute and display/save an histogram of weights.' + ) histo.add_argument('--nb_bins', type=int, help='Number of bins to use for the histogram.') histo.add_argument('--exclude_zeros', action='store_true', @@ -98,7 +101,8 @@ def _build_arg_parser(): chord = p.add_argument_group(title='Chord chart options') chord.add_argument('--chord_chart', metavar='FILENAME', - help='Compute and display/save a chord chart of weigth.') + help='Compute and display/save a chord chart of weigth.' + ) chord.add_argument('--percentile_threshold', type=int, default=0, help='Discard connections below that percentile.' '[%(default)s]') @@ -107,8 +111,8 @@ def _build_arg_parser(): 'Use --alpha to set opacity. Value typically' 'between 0.1 and 5 degrees. [%(default)s]') chord.add_argument('--alpha', type=float, default=0.9, - help='Opacity for the smaller angle on the chord (0-1). ' - '[%(default)s]') + help='Opacity for the smaller angle on the chord (0-1).' 
+ ' [%(default)s]') chord.add_argument('--text_size', default=10, type=float, help='Size of the font for the parcels name/number ' '[%(default)s].') diff --git a/scripts/scil_volume_stats_in_labels.py b/scripts/scil_volume_stats_in_labels.py index b3dd711a64..ee4cc45889 100755 --- a/scripts/scil_volume_stats_in_labels.py +++ b/scripts/scil_volume_stats_in_labels.py @@ -5,6 +5,8 @@ (corresponding to an atlas) associated with a specific bundle. Here we want to estimate the seeding attribution to cortical area affected by the bundle + +Formally: scil_compute_seed_by_labels.py """ import argparse From ec9bb920f51a27e437f8b3bdd611d608a9ca37e8 Mon Sep 17 00:00:00 2001 From: frheault Date: Thu, 14 Dec 2023 13:50:28 -0500 Subject: [PATCH 18/63] Use old name --- .../scil_json_convert_entries_to_xlsx.py | 21 ------------------- scripts/legacy/scil_json_harmonize_entries.py | 21 ------------------- scripts/legacy/scil_json_merge_entries.py | 21 ------------------- 3 files changed, 63 deletions(-) delete mode 100755 scripts/legacy/scil_json_convert_entries_to_xlsx.py delete mode 100755 scripts/legacy/scil_json_harmonize_entries.py delete mode 100755 scripts/legacy/scil_json_merge_entries.py diff --git a/scripts/legacy/scil_json_convert_entries_to_xlsx.py b/scripts/legacy/scil_json_convert_entries_to_xlsx.py deleted file mode 100755 index 871ad8abf6..0000000000 --- a/scripts/legacy/scil_json_convert_entries_to_xlsx.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -from scilpy.io.deprecator import deprecate_script -from scripts.scil_json_convert_entries_to_xlsx import main as new_main - - -DEPRECATION_MSG = """ -This script has been renamed scil_json_convert_entries_to_xlsx.py. -Please change your existing pipelines accordingly. -""" - - -@deprecate_script("scil_json_convert_entries_to_xlsx.py", - DEPRECATION_MSG, '1.7.0') -def main(): - new_main() - - -if __name__ == "__main__": - main() diff --git a/scripts/legacy/scil_json_harmonize_entries.py b/scripts/legacy/scil_json_harmonize_entries.py deleted file mode 100755 index 07ac6ab7dc..0000000000 --- a/scripts/legacy/scil_json_harmonize_entries.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -from scilpy.io.deprecator import deprecate_script -from scripts.scil_json_harmonize_entries import main as new_main - - -DEPRECATION_MSG = """ -This script has been renamed scil_json_harmonize_entries.py. -Please change your existing pipelines accordingly. -""" - - -@deprecate_script("scil_json_harmonize_entries.py", - DEPRECATION_MSG, '1.7.0') -def main(): - new_main() - - -if __name__ == "__main__": - main() diff --git a/scripts/legacy/scil_json_merge_entries.py b/scripts/legacy/scil_json_merge_entries.py deleted file mode 100755 index bdc5efa911..0000000000 --- a/scripts/legacy/scil_json_merge_entries.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -from scilpy.io.deprecator import deprecate_script -from scripts.scil_json_merge_entries import main as new_main - - -DEPRECATION_MSG = """ -This script has been renamed scil_json_merge_entries.py. -Please change your existing pipelines accordingly. 
-""" - - -@deprecate_script("scil_json_merge_entries.py", - DEPRECATION_MSG, '1.7.0') -def main(): - new_main() - - -if __name__ == "__main__": - main() From 0cf8255867e9392af0020ea34203d884110ebc20 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 14 Dec 2023 13:56:24 -0500 Subject: [PATCH 19/63] fixing dump mistake --- scripts/scil_NODDI_maps.py | 2 +- scripts/scil_NODDI_priors.py | 2 +- scripts/scil_aodf_metrics.py | 2 +- scripts/scil_btensor_metrics.py | 2 +- scripts/scil_bundle_compute_centroid.py | 2 +- scripts/scil_bundle_compute_endpoints_map.py | 2 +- scripts/scil_bundle_diameter.py | 2 +- scripts/scil_bundle_filter_by_occurence.py | 2 +- scripts/scil_bundle_generate_priors.py | 2 +- scripts/scil_bundle_label_map.py | 2 +- scripts/scil_bundle_mean_fixel_afd.py | 2 +- scripts/scil_bundle_mean_fixel_afd_from_hdf5.py | 2 +- scripts/scil_bundle_mean_fixel_lobe_metric.py | 2 +- scripts/scil_bundle_mean_std.py | 2 +- scripts/scil_bundle_pairwise_comparison.py | 2 +- scripts/scil_bundle_score_many_bundles_one_tractogram.py | 2 +- scripts/scil_bundle_score_same_bundle_many_segmentations.py | 2 +- scripts/scil_bundle_shape_measures.py | 2 +- scripts/scil_bundle_volume_per_label.py | 2 +- scripts/scil_connectivity_compare_populations.py | 2 +- scripts/scil_connectivity_compute_matrices.py | 2 +- scripts/scil_connectivity_filter.py | 2 +- scripts/scil_connectivity_graph_measures.py | 2 +- scripts/scil_connectivity_hdf5_average_density_map.py | 2 +- scripts/scil_connectivity_normalize.py | 2 +- scripts/scil_connectivity_pairwise_agreement.py | 2 +- scripts/scil_connectivity_print_filenames.py | 2 +- scripts/scil_connectivity_reorder_rois.py | 2 +- scripts/scil_denoising_nlmeans.py | 2 +- scripts/scil_dki_metrics.py | 2 +- scripts/scil_dti_metrics.py | 2 +- scripts/scil_dwi_apply_bias_field.py | 2 +- scripts/scil_dwi_compute_snr.py | 2 +- scripts/scil_dwi_concatenate.py | 2 +- scripts/scil_dwi_extract_b0.py | 2 +- scripts/scil_dwi_extract_shell.py | 2 +- scripts/scil_dwi_powder_average.py | 2 +- scripts/scil_dwi_prepare_eddy_command.py | 2 +- scripts/scil_dwi_prepare_topup_command.py | 2 +- scripts/scil_dwi_reorder_philips.py | 2 +- scripts/scil_dwi_split_by_indices.py | 2 +- scripts/scil_dwi_to_sh.py | 2 +- scripts/scil_fodf_lobe_specific_metrics.py | 2 +- scripts/scil_fodf_max_in_ventricles.py | 2 +- scripts/scil_fodf_memsmt.py | 2 +- scripts/scil_fodf_metrics.py | 2 +- scripts/scil_fodf_msmt.py | 2 +- scripts/scil_fodf_ssst.py | 2 +- scripts/scil_fodf_to_bingham.py | 2 +- scripts/scil_freewater_maps.py | 2 +- scripts/scil_frf_mean.py | 2 +- scripts/scil_frf_memsmt.py | 2 +- scripts/scil_frf_msmt.py | 2 +- scripts/scil_frf_set_diffusivities.py | 2 +- scripts/scil_frf_ssst.py | 2 +- scripts/scil_gradients_apply_transform.py | 2 +- scripts/scil_gradients_convert_fsl_to_mrtrix.py | 2 +- scripts/scil_gradients_convert_mrtrix_to_fsl.py | 2 +- scripts/scil_gradients_generate_sampling.py | 2 +- scripts/scil_gradients_modify_axes.py | 2 +- scripts/scil_gradients_round_bvals.py | 2 +- scripts/scil_gradients_validate_correct.py | 2 +- scripts/scil_gradients_validate_correct_eddy.py | 2 +- scripts/scil_json_convert_to_xlsx.py | 2 +- scripts/scil_json_harmonize.py | 2 +- scripts/scil_json_merge.py | 2 +- scripts/scil_labels_combine.py | 2 +- scripts/scil_labels_dilate.py | 2 +- scripts/scil_labels_remove.py | 2 +- scripts/scil_labels_split_volume_by_ids.py | 2 +- scripts/scil_labels_split_volume_from_lut.py | 2 +- scripts/scil_mti_maps_MT.py | 2 +- 
scripts/scil_mti_maps_ihMT.py | 2 +- scripts/scil_qball_metrics.py | 2 +- scripts/scil_rgb_convert.py | 2 +- scripts/scil_sh_convert.py | 2 +- scripts/scil_sh_fusion.py | 2 +- scripts/scil_sh_to_rish.py | 2 +- scripts/scil_sh_to_sf.py | 2 +- scripts/scil_surface_apply_transform.py | 2 +- scripts/scil_surface_convert.py | 2 +- scripts/scil_surface_flip.py | 2 +- scripts/scil_surface_smooth.py | 2 +- scripts/scil_tracking_local.py | 2 +- scripts/scil_tracking_local_dev.py | 2 +- scripts/scil_tracking_pft.py | 2 +- scripts/scil_tracking_pft_maps.py | 2 +- scripts/scil_tracking_pft_maps_edit.py | 2 +- scripts/scil_tractogram_apply_transform.py | 2 +- scripts/scil_tractogram_apply_transform_to_hdf5.py | 2 +- scripts/scil_tractogram_assign_custom_color.py | 2 +- scripts/scil_tractogram_assign_uniform_color.py | 2 +- scripts/scil_tractogram_commit.py | 2 +- scripts/scil_tractogram_compress.py | 2 +- scripts/scil_tractogram_convert.py | 2 +- scripts/scil_tractogram_convert_hdf5_to_trk.py | 2 +- scripts/scil_tractogram_count_streamlines.py | 2 +- scripts/scil_tractogram_cut_streamlines.py | 2 +- scripts/scil_tractogram_detect_loops.py | 2 +- scripts/scil_tractogram_extract_ushape.py | 2 +- scripts/scil_tractogram_filter_by_anatomy.py | 2 +- scripts/scil_tractogram_filter_by_length.py | 2 +- scripts/scil_tractogram_filter_by_orientation.py | 2 +- scripts/scil_tractogram_filter_by_roi.py | 2 +- scripts/scil_tractogram_fix_trk.py | 2 +- scripts/scil_tractogram_flip.py | 2 +- scripts/scil_tractogram_math.py | 2 +- scripts/scil_tractogram_qbx.py | 2 +- scripts/scil_tractogram_register.py | 2 +- scripts/scil_tractogram_remove_invalid.py | 2 +- scripts/scil_tractogram_resample.py | 2 +- scripts/scil_tractogram_resample_nb_points.py | 2 +- scripts/scil_tractogram_seed_density_map.py | 2 +- scripts/scil_tractogram_segment_bundles.py | 2 +- scripts/scil_tractogram_segment_bundles_for_connectivity.py | 2 +- scripts/scil_tractogram_segment_one_bundles.py | 2 +- scripts/scil_tractogram_shuffle.py | 2 +- scripts/scil_tractogram_smooth.py | 2 +- scripts/scil_tractogram_split.py | 2 +- scripts/scil_tractogram_uniformize_endpoints.py | 2 +- scripts/scil_volume_apply_transform.py | 2 +- scripts/scil_volume_count_non_zero_voxels.py | 2 +- scripts/scil_volume_crop.py | 2 +- scripts/scil_volume_flip.py | 2 +- scripts/scil_volume_math.py | 2 +- scripts/scil_volume_remove_outliers_ransac.py | 2 +- scripts/scil_volume_resample.py | 2 +- scripts/scil_volume_reshape_to_reference.py | 2 +- scripts/scil_volume_stats_in_labels.py | 2 +- 129 files changed, 129 insertions(+), 129 deletions(-) diff --git a/scripts/scil_NODDI_maps.py b/scripts/scil_NODDI_maps.py index 458bcc6e91..85c327344a 100755 --- a/scripts/scil_NODDI_maps.py +++ b/scripts/scil_NODDI_maps.py @@ -5,7 +5,7 @@ Compute NODDI [1] maps using AMICO. Multi-shell DWI necessary. -Formally: scil_compute_NODDI.py +Formerly: scil_compute_NODDI.py """ import argparse diff --git a/scripts/scil_NODDI_priors.py b/scripts/scil_NODDI_priors.py index 8297ae36fc..87a38552b5 100755 --- a/scripts/scil_NODDI_priors.py +++ b/scripts/scil_NODDI_priors.py @@ -4,7 +4,7 @@ """ Compute the axial (para_diff) and mean (iso_diff) diffusivity priors for NODDI. 
-Formally: scil_compute_NODDI_priors.py +Formerly: scil_compute_NODDI_priors.py """ import argparse diff --git a/scripts/scil_aodf_metrics.py b/scripts/scil_aodf_metrics.py index c130809eac..c5d8bd324a 100755 --- a/scripts/scil_aodf_metrics.py +++ b/scripts/scil_aodf_metrics.py @@ -23,7 +23,7 @@ given as the ratio of the L2-norm of odd SH coefficients on the L2-norm of all SH coefficients. -Formally: scil_compute_asym_odf_metrics.py +Formerly: scil_compute_asym_odf_metrics.py """ diff --git a/scripts/scil_btensor_metrics.py b/scripts/scil_btensor_metrics.py index 544ac890cb..7eb45e66f0 100755 --- a/scripts/scil_btensor_metrics.py +++ b/scripts/scil_btensor_metrics.py @@ -29,7 +29,7 @@ diffusion MRI data implemented in MATLAB. Proc. Intl. Soc. Mag. Reson. Med. (26), Paris, France, 2018. -Formally: scil_compute_divide.py +Formerly: scil_compute_divide.py """ import argparse diff --git a/scripts/scil_bundle_compute_centroid.py b/scripts/scil_bundle_compute_centroid.py index da960853f6..ac5731940e 100755 --- a/scripts/scil_bundle_compute_centroid.py +++ b/scripts/scil_bundle_compute_centroid.py @@ -4,7 +4,7 @@ """ Compute a single bundle centroid, using an 'infinite' QuickBundles threshold. -Formally: scil_compute_centroid.py +Formerly: scil_compute_centroid.py """ import argparse diff --git a/scripts/scil_bundle_compute_endpoints_map.py b/scripts/scil_bundle_compute_endpoints_map.py index a17b6f30a7..5725ce55be 100755 --- a/scripts/scil_bundle_compute_endpoints_map.py +++ b/scripts/scil_bundle_compute_endpoints_map.py @@ -12,7 +12,7 @@ really two coherent groups. Use the following script to order streamlines: scil_tractogram_uniformize_endpoints.py -Formally: scil_compute_endpoints_map.py +Formerly: scil_compute_endpoints_map.py """ import argparse diff --git a/scripts/scil_bundle_diameter.py b/scripts/scil_bundle_diameter.py index 9e835bff58..40c1a46d78 100755 --- a/scripts/scil_bundle_diameter.py +++ b/scripts/scil_bundle_diameter.py @@ -24,7 +24,7 @@ the script comes with its own VTK rendering to allow exploration of the data. (optional). -Formally: scil_estimate_bundles_diameter.py +Formerly: scil_estimate_bundles_diameter.py """ import argparse diff --git a/scripts/scil_bundle_filter_by_occurence.py b/scripts/scil_bundle_filter_by_occurence.py index 34077e51e0..b7933559a2 100755 --- a/scripts/scil_bundle_filter_by_occurence.py +++ b/scripts/scil_bundle_filter_by_occurence.py @@ -10,7 +10,7 @@ bundle clustering techniques), streamline-wise vote is available to find the streamlines most often included in the bundle. -Formally: scil_perform_majority_vote.py +Formerly: scil_perform_majority_vote.py """ diff --git a/scripts/scil_bundle_generate_priors.py b/scripts/scil_bundle_generate_priors.py index 348c28d40d..3073246258 100755 --- a/scripts/scil_bundle_generate_priors.py +++ b/scripts/scil_bundle_generate_priors.py @@ -6,7 +6,7 @@ The bundle must have been cleaned thorougly before use. The E-FOD can then be used for bundle-specific tractography, but not for FOD metrics. -Formally: scil_generate_priors_from_bundle.py +Formerly: scil_generate_priors_from_bundle.py """ import argparse diff --git a/scripts/scil_bundle_label_map.py b/scripts/scil_bundle_label_map.py index 1c1d1449cc..d00811da14 100755 --- a/scripts/scil_bundle_label_map.py +++ b/scripts/scil_bundle_label_map.py @@ -11,7 +11,7 @@ The number of labels will be the same as the centroid's number of points. 
-Formally: scil_compute_bundle_voxel_label_map.py +Formerly: scil_compute_bundle_voxel_label_map.py """ import argparse diff --git a/scripts/scil_bundle_mean_fixel_afd.py b/scripts/scil_bundle_mean_fixel_afd.py index 7cd4a9d0ab..47a20c92d7 100755 --- a/scripts/scil_bundle_mean_fixel_afd.py +++ b/scripts/scil_bundle_mean_fixel_afd.py @@ -10,7 +10,7 @@ Please use a bundle file rather than a whole tractogram. -Formally: scil_compute_fixel_afd_from_bundles.py +Formerly: scil_compute_fixel_afd_from_bundles.py """ import argparse diff --git a/scripts/scil_bundle_mean_fixel_afd_from_hdf5.py b/scripts/scil_bundle_mean_fixel_afd_from_hdf5.py index bd0c928484..1080b6649a 100755 --- a/scripts/scil_bundle_mean_fixel_afd_from_hdf5.py +++ b/scripts/scil_bundle_mean_fixel_afd_from_hdf5.py @@ -10,7 +10,7 @@ Please use a hdf5 (.h5) file containing decomposed connections -Formally: scil_compute_fixel_afd_from_hdf5.py +Formerly: scil_compute_fixel_afd_from_hdf5.py """ import argparse diff --git a/scripts/scil_bundle_mean_fixel_lobe_metric.py b/scripts/scil_bundle_mean_fixel_lobe_metric.py index 8c1be29bb5..11568c77fa 100755 --- a/scripts/scil_bundle_mean_fixel_lobe_metric.py +++ b/scripts/scil_bundle_mean_fixel_lobe_metric.py @@ -22,7 +22,7 @@ Please use a bundle file rather than a whole tractogram. -Formally: scil_compute_mean_fixel_obe_metric_from_bundles.py +Formerly: scil_compute_mean_fixel_obe_metric_from_bundles.py """ import argparse diff --git a/scripts/scil_bundle_mean_std.py b/scripts/scil_bundle_mean_std.py index 078a15cdbf..76df89e492 100755 --- a/scripts/scil_bundle_mean_std.py +++ b/scripts/scil_bundle_mean_std.py @@ -14,7 +14,7 @@ Density weighting modifies the contribution of voxel with lower/higher streamline count to reduce influence of spurious streamlines. -Formally: scil_compute_bundle_mean_std_per_point.py or +Formerly: scil_compute_bundle_mean_std_per_point.py or scil_compute_bundle_mean_std.py """ diff --git a/scripts/scil_bundle_pairwise_comparison.py b/scripts/scil_bundle_pairwise_comparison.py index 0f280428c1..31ae4a52d0 100755 --- a/scripts/scil_bundle_pairwise_comparison.py +++ b/scripts/scil_bundle_pairwise_comparison.py @@ -14,7 +14,7 @@ bundle_adjacency_streamlines, dice_streamlines, streamlines_count_overlap, streamlines_count_overreach -Formally: scil_evaluate_bundles_pairwise_agreement_measures.py +Formerly: scil_evaluate_bundles_pairwise_agreement_measures.py """ import argparse diff --git a/scripts/scil_bundle_score_many_bundles_one_tractogram.py b/scripts/scil_bundle_score_many_bundles_one_tractogram.py index 6db5fa1470..b75cf7008f 100755 --- a/scripts/scil_bundle_score_many_bundles_one_tractogram.py +++ b/scripts/scil_bundle_score_many_bundles_one_tractogram.py @@ -37,7 +37,7 @@ } } -Formally: scil_score_bundles.py +Formerly: scil_score_bundles.py """ import argparse import glob diff --git a/scripts/scil_bundle_score_same_bundle_many_segmentations.py b/scripts/scil_bundle_score_same_bundle_many_segmentations.py index ad81f6e4c1..fbc3f12fee 100755 --- a/scripts/scil_bundle_score_same_bundle_many_segmentations.py +++ b/scripts/scil_bundle_score_same_bundle_many_segmentations.py @@ -26,7 +26,7 @@ sensitivity, specificity, precision, accuracy, dice, kappa, youden for both the streamline and voxel representation (if provided). 
-Formally: scil_evaluate_bundles_binary_classification_measures.py +Formerly: scil_evaluate_bundles_binary_classification_measures.py """ import argparse diff --git a/scripts/scil_bundle_shape_measures.py b/scripts/scil_bundle_shape_measures.py index 1e16a70a58..97953542d8 100755 --- a/scripts/scil_bundle_shape_measures.py +++ b/scripts/scil_bundle_shape_measures.py @@ -31,7 +31,7 @@ The fractal dimension is dependent on the voxel size and the number of voxels. If data comparison is performed, the bundles MUST be in same resolution. -Formally: scil_compute_bundle_volume.py or +Formerly: scil_compute_bundle_volume.py or scil_evaluate_bundles_individual_measures.py """ diff --git a/scripts/scil_bundle_volume_per_label.py b/scripts/scil_bundle_volume_per_label.py index 71cece4d50..9db60e899c 100755 --- a/scripts/scil_bundle_volume_per_label.py +++ b/scripts/scil_bundle_volume_per_label.py @@ -13,7 +13,7 @@ To get the volume and other measures directly from the (whole) bundle, use scil_bundle_shape_measures.py. -Formally: scil_compute_bundle_volume_per_label.py +Formerly: scil_compute_bundle_volume_per_label.py """ import argparse diff --git a/scripts/scil_connectivity_compare_populations.py b/scripts/scil_connectivity_compare_populations.py index 1154f2b2cd..d914847641 100755 --- a/scripts/scil_connectivity_compare_populations.py +++ b/scripts/scil_connectivity_compare_populations.py @@ -17,7 +17,7 @@ matrices before performing the statistical comparison. Reduces the number of statistical tests, useful when using --fdr or --bonferroni. -Formally: scil_compare_connectivity.py +Formerly: scil_compare_connectivity.py """ import argparse diff --git a/scripts/scil_connectivity_compute_matrices.py b/scripts/scil_connectivity_compute_matrices.py index a858cada48..27c920fc09 100755 --- a/scripts/scil_connectivity_compute_matrices.py +++ b/scripts/scil_connectivity_compute_matrices.py @@ -37,7 +37,7 @@ connection. Each connection can be seen as a 'bundle' and then something similar to scil_analyse_lesion_load.py is run for each 'bundle'. -Formally: scil_compute_connectivity.py +Formerly: scil_compute_connectivity.py """ import argparse diff --git a/scripts/scil_connectivity_filter.py b/scripts/scil_connectivity_filter.py index cdb9da02ac..1900495b3b 100755 --- a/scripts/scil_connectivity_filter.py +++ b/scripts/scil_connectivity_filter.py @@ -32,7 +32,7 @@ If the user wants to manually handle the requirements, --keep_condition_count can be used and manually binarized using scil_connectivity_math.py -Formally: scil_filter_connectivity.py +Formerly: scil_filter_connectivity.py """ import argparse diff --git a/scripts/scil_connectivity_graph_measures.py b/scripts/scil_connectivity_graph_measures.py index f6abbca632..4a2122e01f 100755 --- a/scripts/scil_connectivity_graph_measures.py +++ b/scripts/scil_connectivity_graph_measures.py @@ -28,7 +28,7 @@ This script is under the GNU GPLv3 license, for more detail please refer to https://www.gnu.org/licenses/gpl-3.0.en.html -Formally: scil_evaluate_connectivity_graph_measures.py +Formerly: scil_evaluate_connectivity_graph_measures.py """ import argparse diff --git a/scripts/scil_connectivity_hdf5_average_density_map.py b/scripts/scil_connectivity_hdf5_average_density_map.py index c53bd97b0c..13a22dbb56 100755 --- a/scripts/scil_connectivity_hdf5_average_density_map.py +++ b/scripts/scil_connectivity_hdf5_average_density_map.py @@ -16,7 +16,7 @@ |-- [...] 
|-- LABEL90_LABEL90.nii.gz -Formally: scil_compute_hdf5_average_density_map.py +Formerly: scil_compute_hdf5_average_density_map.py """ import argparse diff --git a/scripts/scil_connectivity_normalize.py b/scripts/scil_connectivity_normalize.py index 162363063a..76f2cf859e 100755 --- a/scripts/scil_connectivity_normalize.py +++ b/scripts/scil_connectivity_normalize.py @@ -41,7 +41,7 @@ However, the proposed weighting of edge presented in this publication is not implemented. -Formally: scil_normalize_connectivity.py +Formerly: scil_normalize_connectivity.py """ import argparse diff --git a/scripts/scil_connectivity_pairwise_agreement.py b/scripts/scil_connectivity_pairwise_agreement.py index 14a4d22a3a..6017c789a4 100755 --- a/scripts/scil_connectivity_pairwise_agreement.py +++ b/scripts/scil_connectivity_pairwise_agreement.py @@ -7,7 +7,7 @@ The computed similarity measures are: sum of square difference and pearson correlation coefficent -Formally: scil_evaluate_connectivity_pairwaise_agreement_measures.py +Formerly: scil_evaluate_connectivity_pairwaise_agreement_measures.py """ import argparse diff --git a/scripts/scil_connectivity_print_filenames.py b/scripts/scil_connectivity_print_filenames.py index c18af72e67..e82a9c1a34 100755 --- a/scripts/scil_connectivity_print_filenames.py +++ b/scripts/scil_connectivity_print_filenames.py @@ -16,7 +16,7 @@ do mv ${SOMEWHERE}/${FILE} ${SOMEWHERE_ELSE}/; done -Formally: scil_print_connectivity_filenames.py +Formerly: scil_print_connectivity_filenames.py """ import argparse diff --git a/scripts/scil_connectivity_reorder_rois.py b/scripts/scil_connectivity_reorder_rois.py index 3319df18bc..e1c081f99b 100755 --- a/scripts/scil_connectivity_reorder_rois.py +++ b/scripts/scil_connectivity_reorder_rois.py @@ -20,7 +20,7 @@ file can then be re-used with --in_ordering. Only one input can be used with this option, we recommand an average streamline count or volume matrix. -Formally: scil_reorder_connectivity.py +Formerly: scil_reorder_connectivity.py """ import argparse diff --git a/scripts/scil_denoising_nlmeans.py b/scripts/scil_denoising_nlmeans.py index e48cd9e205..8588cfce45 100755 --- a/scripts/scil_denoising_nlmeans.py +++ b/scripts/scil_denoising_nlmeans.py @@ -4,7 +4,7 @@ """ Script to denoise a dataset with the Non Local Means algorithm. -Formally: scil_run_nlmeans.py +Formerly: scil_run_nlmeans.py """ import argparse diff --git a/scripts/scil_dki_metrics.py b/scripts/scil_dki_metrics.py index 161af2e952..e73a1740d7 100755 --- a/scripts/scil_dki_metrics.py +++ b/scripts/scil_dki_metrics.py @@ -39,7 +39,7 @@ [1] examples_built/reconst_dki/#example-reconst-dki [2] examples_built/reconst_msdki/#example-reconst-msdki -Formally: scil_compute_kurtosis_metrics.py +Formerly: scil_compute_kurtosis_metrics.py """ import argparse diff --git a/scripts/scil_dti_metrics.py b/scripts/scil_dti_metrics.py index c4e955d0c2..128d17f50d 100755 --- a/scripts/scil_dti_metrics.py +++ b/scripts/scil_dti_metrics.py @@ -21,7 +21,7 @@ [J-D Tournier, S. Mori, A. Leemans. Diffusion Tensor Imaging and Beyond. MRM 2011]. -Formally: scil_compute_dti_metrics.py +Formerly: scil_compute_dti_metrics.py """ import argparse diff --git a/scripts/scil_dwi_apply_bias_field.py b/scripts/scil_dwi_apply_bias_field.py index 2f4e175162..0b3654975a 100755 --- a/scripts/scil_dwi_apply_bias_field.py +++ b/scripts/scil_dwi_apply_bias_field.py @@ -6,7 +6,7 @@ field itself. It ONLY applies an existing bias field. Use the ANTs N4BiasFieldCorrection executable to compute the bias field. 
-Formally: scil_apply_bias_field_on_dwi.py +Formerly: scil_apply_bias_field_on_dwi.py """ import argparse diff --git a/scripts/scil_dwi_compute_snr.py b/scripts/scil_dwi_compute_snr.py index 73e852b346..e5fbd33e53 100755 --- a/scripts/scil_dwi_compute_snr.py +++ b/scripts/scil_dwi_compute_snr.py @@ -26,7 +26,7 @@ [2] Reymbaut, et al (2021). Magic DIAMOND... https://doi.org/10.1016/j.media.2021.101988 -Formally: scil_snr_in_roi.py +Formerly: scil_snr_in_roi.py """ import argparse diff --git a/scripts/scil_dwi_concatenate.py b/scripts/scil_dwi_concatenate.py index b130d4e66d..472f2d103f 100755 --- a/scripts/scil_dwi_concatenate.py +++ b/scripts/scil_dwi_concatenate.py @@ -5,7 +5,7 @@ Concatenate DWI, bval and bvecs together. File must be specified in matching order. Default data type will be the same as the first input DWI. -Formally: scil_concatenate_dwi.py +Formerly: scil_concatenate_dwi.py """ import argparse diff --git a/scripts/scil_dwi_extract_b0.py b/scripts/scil_dwi_extract_b0.py index 13c71f81ab..7361fbaa69 100755 --- a/scripts/scil_dwi_extract_b0.py +++ b/scripts/scil_dwi_extract_b0.py @@ -6,7 +6,7 @@ The default behavior is to save the first b0 of the series. -Formally: scil_extract_b0.py +Formerly: scil_extract_b0.py """ import argparse diff --git a/scripts/scil_dwi_extract_shell.py b/scripts/scil_dwi_extract_shell.py index f0ccab25b8..f5ee73f764 100755 --- a/scripts/scil_dwi_extract_shell.py +++ b/scripts/scil_dwi_extract_shell.py @@ -15,7 +15,7 @@ setting the --block-size argument. A block size of X means that X DWI volumes are loaded at a time for processing. -Formally: scil_extract_dwi_shell.py +Formerly: scil_extract_dwi_shell.py """ import argparse diff --git a/scripts/scil_dwi_powder_average.py b/scripts/scil_dwi_powder_average.py index 5fd0ef97eb..9b8627fe61 100755 --- a/scripts/scil_dwi_powder_average.py +++ b/scripts/scil_dwi_powder_average.py @@ -13,7 +13,7 @@ Script currently does not take into account the diffusion gradient directions being averaged. -Formally: scil_compute_powder_average.py +Formerly: scil_compute_powder_average.py """ import argparse diff --git a/scripts/scil_dwi_prepare_eddy_command.py b/scripts/scil_dwi_prepare_eddy_command.py index 10efb42e52..bdbf1d357c 100755 --- a/scripts/scil_dwi_prepare_eddy_command.py +++ b/scripts/scil_dwi_prepare_eddy_command.py @@ -7,7 +7,7 @@ topup prior to calling this script, images should be concatenated in the same order as the b0s used with prepare_topup. -Formally: scil_prepare_eddy_command.py +Formerly: scil_prepare_eddy_command.py """ import argparse diff --git a/scripts/scil_dwi_prepare_topup_command.py b/scripts/scil_dwi_prepare_topup_command.py index 64beb81eb7..2a42b59fbd 100755 --- a/scripts/scil_dwi_prepare_topup_command.py +++ b/scripts/scil_dwi_prepare_topup_command.py @@ -5,7 +5,7 @@ Prepare a typical command for topup and create the necessary files. The reversed b0 must be in a different file. 
-Formally: scil_prepare_topup_command.py
+Formerly: scil_prepare_topup_command.py
 """
 
 import argparse
diff --git a/scripts/scil_dwi_reorder_philips.py b/scripts/scil_dwi_reorder_philips.py
index 49b0133ca5..b597c7ff94 100755
--- a/scripts/scil_dwi_reorder_philips.py
+++ b/scripts/scil_dwi_reorder_philips.py
@@ -5,7 +5,7 @@
 Re-order gradient according to original table (Philips)
 This script is not needed for version 5.6 and higher
 
-Formally: scil_reorder_dwi_philips.py
+Formerly: scil_reorder_dwi_philips.py
 """
 
 import argparse
diff --git a/scripts/scil_dwi_split_by_indices.py b/scripts/scil_dwi_split_by_indices.py
index edadcdaf84..0aee08dedd 100755
--- a/scripts/scil_dwi_split_by_indices.py
+++ b/scripts/scil_dwi_split_by_indices.py
@@ -11,7 +11,7 @@
 extraction does not work. For instance, if one wants to split the x first
 b-1500s from the rest of the b-1500s in an image, simply put x as an index.
 
-Formally: scil_split_image.py
+Formerly: scil_split_image.py
 """
 
 import argparse
diff --git a/scripts/scil_dwi_to_sh.py b/scripts/scil_dwi_to_sh.py
index 9d239eef66..3593f67613 100755
--- a/scripts/scil_dwi_to_sh.py
+++ b/scripts/scil_dwi_to_sh.py
@@ -4,7 +4,7 @@
 """
 Script to compute the SH coefficient directly on the raw DWI signal.
 
-Formally: scil_compute_sh_from_signal.py
+Formerly: scil_compute_sh_from_signal.py
 """
 
 import argparse
diff --git a/scripts/scil_fodf_lobe_specific_metrics.py b/scripts/scil_fodf_lobe_specific_metrics.py
index 55d93f523b..df23c46006 100755
--- a/scripts/scil_fodf_lobe_specific_metrics.py
+++ b/scripts/scil_fodf_lobe_specific_metrics.py
@@ -16,7 +16,7 @@
 Using 12 threads, the execution takes 10 minutes for FD estimation for a
 brain with 1mm isotropic resolution. Other metrics take less than a second.
 
-Formally: scil_compute_lobe_specific_fodf_metrics.py
+Formerly: scil_compute_lobe_specific_fodf_metrics.py
 """
 
 import nibabel as nib
diff --git a/scripts/scil_fodf_max_in_ventricles.py b/scripts/scil_fodf_max_in_ventricles.py
index fe864d8c35..cbe22545c9 100755
--- a/scripts/scil_fodf_max_in_ventricles.py
+++ b/scripts/scil_fodf_max_in_ventricles.py
@@ -7,7 +7,7 @@
 
 This allows to clip the noise of fODF using an absolute threshold.
 
-Formally: scil_compute_fodf_max_in_ventricles.py
+Formerly: scil_compute_fodf_max_in_ventricles.py
 """
 
 import argparse
diff --git a/scripts/scil_fodf_memsmt.py b/scripts/scil_fodf_memsmt.py
index e4a1b4fb07..d8a3c25bbc 100755
--- a/scripts/scil_fodf_memsmt.py
+++ b/scripts/scil_fodf_memsmt.py
@@ -30,7 +30,7 @@
 deconvolution and diffusional variance decomposition via tensor-valued
 diffusion MRI. Medical Image Analysis (2022)
 
-Formally: scil_compute_memsmt_fodf.py
+Formerly: scil_compute_memsmt_fodf.py
 """
 
 import argparse
diff --git a/scripts/scil_fodf_metrics.py b/scripts/scil_fodf_metrics.py
index c14fe94ec2..d9cca46ff5 100755
--- a/scripts/scil_fodf_metrics.py
+++ b/scripts/scil_fodf_metrics.py
@@ -30,7 +30,7 @@
 See [Raffelt et al. NeuroImage 2012] and [Dell'Acqua et al HBM 2013] for
 the definitions.
 
-Formally: scil_compute_fodf_metrics.py
+Formerly: scil_compute_fodf_metrics.py
 """
 
 import argparse
diff --git a/scripts/scil_fodf_msmt.py b/scripts/scil_fodf_msmt.py
index 6b0f8b5bca..1bed2e9a26 100755
--- a/scripts/scil_fodf_msmt.py
+++ b/scripts/scil_fodf_msmt.py
@@ -16,7 +16,7 @@
 deconvolution for improved analysis of multi-shell diffusion MRI data.
Neuroimage (2014) -Formally: scil_compute_msmt_fodf.py +Formerly: scil_compute_msmt_fodf.py """ import argparse diff --git a/scripts/scil_fodf_ssst.py b/scripts/scil_fodf_ssst.py index e3354c74c1..8fbbdb126e 100755 --- a/scripts/scil_fodf_ssst.py +++ b/scripts/scil_fodf_ssst.py @@ -6,7 +6,7 @@ See [Tournier et al. NeuroImage 2007] -Formally: scil_compute_ssst_fodf.py +Formerly: scil_compute_ssst_fodf.py """ import argparse diff --git a/scripts/scil_fodf_to_bingham.py b/scripts/scil_fodf_to_bingham.py index 9629c5d3f1..68a57b999d 100755 --- a/scripts/scil_fodf_to_bingham.py +++ b/scripts/scil_fodf_to_bingham.py @@ -12,7 +12,7 @@ Using 12 threads, the execution takes approximately 30 minutes for a brain with 1mm isotropic resolution. -Formally: scil_fit_bingham_to_fodf.py +Formerly: scil_fit_bingham_to_fodf.py """ import nibabel as nib diff --git a/scripts/scil_freewater_maps.py b/scripts/scil_freewater_maps.py index b2c1c0c59b..f22753bff8 100755 --- a/scripts/scil_freewater_maps.py +++ b/scripts/scil_freewater_maps.py @@ -5,7 +5,7 @@ Compute Free Water maps [1] using AMICO. This script supports both single and multi-shell data. -Formally: scil_compute_freewater.py +Formerly: scil_compute_freewater.py """ import argparse diff --git a/scripts/scil_frf_mean.py b/scripts/scil_frf_mean.py index 1bd0827ad2..7f37b6ab29 100755 --- a/scripts/scil_frf_mean.py +++ b/scripts/scil_frf_mean.py @@ -5,7 +5,7 @@ Compute the mean Fiber Response Function from a set of individually computed Response Functions. -Formally: scil_compute_mean_frf.py +Formerly: scil_compute_mean_frf.py """ import argparse diff --git a/scripts/scil_frf_memsmt.py b/scripts/scil_frf_memsmt.py index e2302acc3f..e236f8b566 100755 --- a/scripts/scil_frf_memsmt.py +++ b/scripts/scil_frf_memsmt.py @@ -33,7 +33,7 @@ deconvolution and diffusional variance decomposition via tensor-valued diffusion MRI. Medical Image Analysis (2022) -Formally: scil_compute_memsmt_frf.py +Formerly: scil_compute_memsmt_frf.py """ import argparse diff --git a/scripts/scil_frf_msmt.py b/scripts/scil_frf_msmt.py index 5ee9cc5f72..0fc42061e9 100755 --- a/scripts/scil_frf_msmt.py +++ b/scripts/scil_frf_msmt.py @@ -25,7 +25,7 @@ deconvolution for improved analysis of multi-shell diffusion MRI data. Neuroimage (2014) -Formally: scil_compute_msmt_frf.py +Formerly: scil_compute_msmt_frf.py """ import argparse diff --git a/scripts/scil_frf_set_diffusivities.py b/scripts/scil_frf_set_diffusivities.py index a0f064c2d1..45d8e6d454 100755 --- a/scripts/scil_frf_set_diffusivities.py +++ b/scripts/scil_frf_set_diffusivities.py @@ -8,7 +8,7 @@ The FRF file is obtained from scil_frf_ssst.py -Formally: scil_set_response_function.py +Formerly: scil_set_response_function.py """ import argparse diff --git a/scripts/scil_frf_ssst.py b/scripts/scil_frf_ssst.py index 10bd8d3e86..f0ea3298bb 100755 --- a/scripts/scil_frf_ssst.py +++ b/scripts/scil_frf_ssst.py @@ -7,7 +7,7 @@ A DTI fit is made, and voxels containing a single fiber population are found using a threshold on the FA. -Formally: scil_compute_ssst_frf.py +Formerly: scil_compute_ssst_frf.py """ import argparse diff --git a/scripts/scil_gradients_apply_transform.py b/scripts/scil_gradients_apply_transform.py index a655fe405a..22ff4afb7a 100755 --- a/scripts/scil_gradients_apply_transform.py +++ b/scripts/scil_gradients_apply_transform.py @@ -4,7 +4,7 @@ """ Transform bvecs using an affine/rigid transformation. -Formally: scil_apply_transform_to_bvecs.py. +Formerly: scil_apply_transform_to_bvecs.py. 
""" import argparse diff --git a/scripts/scil_gradients_convert_fsl_to_mrtrix.py b/scripts/scil_gradients_convert_fsl_to_mrtrix.py index 05dce061ee..50d54fec1a 100755 --- a/scripts/scil_gradients_convert_fsl_to_mrtrix.py +++ b/scripts/scil_gradients_convert_fsl_to_mrtrix.py @@ -4,7 +4,7 @@ """ Script to convert bval/bvec FSL style to MRtrix style. -Formally: scil_convert_gradients_fsl_to_mrtrix.py +Formerly: scil_convert_gradients_fsl_to_mrtrix.py """ import argparse diff --git a/scripts/scil_gradients_convert_mrtrix_to_fsl.py b/scripts/scil_gradients_convert_mrtrix_to_fsl.py index 669f4ce53a..369007d78b 100755 --- a/scripts/scil_gradients_convert_mrtrix_to_fsl.py +++ b/scripts/scil_gradients_convert_mrtrix_to_fsl.py @@ -3,7 +3,7 @@ """ Script to convert bval/bvec MRtrix style to FSL style. -Formally: scil_convert_gradients_mrtrix_to_fsl.py +Formerly: scil_convert_gradients_mrtrix_to_fsl.py """ import argparse diff --git a/scripts/scil_gradients_generate_sampling.py b/scripts/scil_gradients_generate_sampling.py index 2cff7d2fe4..14c9d939b8 100755 --- a/scripts/scil_gradients_generate_sampling.py +++ b/scripts/scil_gradients_generate_sampling.py @@ -10,7 +10,7 @@ equal spacing and the non-b0 samples are finally shuffled to minimize the total diffusion gradient amplitude over a few TR. -Formally: scil_generate_gradient_sampling.py +Formerly: scil_generate_gradient_sampling.py """ import argparse diff --git a/scripts/scil_gradients_modify_axes.py b/scripts/scil_gradients_modify_axes.py index 42c32e09d8..d436aab793 100755 --- a/scripts/scil_gradients_modify_axes.py +++ b/scripts/scil_gradients_modify_axes.py @@ -5,7 +5,7 @@ matrix. Result will be saved in the same format as input gradient sampling file. -Formally: scil_flip_gradients.py or scil_swap_gradient_axis.py +Formerly: scil_flip_gradients.py or scil_swap_gradient_axis.py """ import argparse import os diff --git a/scripts/scil_gradients_round_bvals.py b/scripts/scil_gradients_round_bvals.py index 124f378fbb..ea9402e03c 100755 --- a/scripts/scil_gradients_round_bvals.py +++ b/scripts/scil_gradients_round_bvals.py @@ -13,7 +13,7 @@ >> scil_gradients_round_bvals.py bvals 0 1000 2000 newbvals --tolerance 20 -Formally: scil_resample_bvals.py +Formerly: scil_resample_bvals.py """ import argparse diff --git a/scripts/scil_gradients_validate_correct.py b/scripts/scil_gradients_validate_correct.py index be1a3a305c..86e46a42a4 100755 --- a/scripts/scil_gradients_validate_correct.py +++ b/scripts/scil_gradients_validate_correct.py @@ -18,7 +18,7 @@ voxel, given that the amplitude of each direction is also given with the argument --peaks_vals. 
-Formally: scil_validate_and_correct_bvecs.py
+Formerly: scil_validate_and_correct_bvecs.py
 """
 
 import argparse
diff --git a/scripts/scil_gradients_validate_correct_eddy.py b/scripts/scil_gradients_validate_correct_eddy.py
index 79f15b82e7..4bc033b0f7 100755
--- a/scripts/scil_gradients_validate_correct_eddy.py
+++ b/scripts/scil_gradients_validate_correct_eddy.py
@@ -6,7 +6,7 @@
 With full AP-PA eddy outputs a full bvec bval (2x nb of dirs and bval) that
 doesn't fit with the output dwi (1x nb of dir)
 
-Formally: scil_validate_and_correct_eddy_gradients.py
+Formerly: scil_validate_and_correct_eddy_gradients.py
 """
 
 import argparse
diff --git a/scripts/scil_json_convert_to_xlsx.py b/scripts/scil_json_convert_to_xlsx.py
index 967fa49e96..ddc5386cef 100755
--- a/scripts/scil_json_convert_to_xlsx.py
+++ b/scripts/scil_json_convert_to_xlsx.py
@@ -5,7 +5,7 @@
 Convert a final aggregated json file to an Excel spreadsheet.
 Typically used during the tractometry pipeline.
 
-Formally: scil_convert_json_to_xlsx.py
+Formerly: scil_convert_json_to_xlsx.py
 """
 
 import argparse
diff --git a/scripts/scil_json_harmonize.py b/scripts/scil_json_harmonize.py
index 5fdb94ca1c..45c06ffd30 100755
--- a/scripts/scil_json_harmonize.py
+++ b/scripts/scil_json_harmonize.py
@@ -8,7 +8,7 @@
 This is used only (for now) in Aggregate_All_* portion of tractometry-flow,
 to counter the problem of missing bundles/metrics/lesions between subjects.
 
-Formally: scil_harmonize_json.py
+Formerly: scil_harmonize_json.py
 """
 
 import argparse
diff --git a/scripts/scil_json_merge.py b/scripts/scil_json_merge.py
index 9051e8a905..20ce3132e3 100755
--- a/scripts/scil_json_merge.py
+++ b/scripts/scil_json_merge.py
@@ -6,7 +6,7 @@
 the --keep_separate option will add an entry for each file, the basename
 will become the key.
 
-Formally: scil_merge_json.py
+Formerly: scil_merge_json.py
 """
 
 import argparse
diff --git a/scripts/scil_labels_combine.py b/scripts/scil_labels_combine.py
index 2e903c9128..c3adb6fbd3 100755
--- a/scripts/scil_labels_combine.py
+++ b/scripts/scil_labels_combine.py
@@ -13,7 +13,7 @@
     --volume_ids a2009s_aseg.nii.gz all
     --volume_ids clean/s1__DKT.nii.gz 1028 2028
 
-Formally: scil_combine_labels.py.
+Formerly: scil_combine_labels.py.
 """
 
 
diff --git a/scripts/scil_labels_dilate.py b/scripts/scil_labels_dilate.py
index c9160f3242..5707efb5e3 100755
--- a/scripts/scil_labels_dilate.py
+++ b/scripts/scil_labels_dilate.py
@@ -14,7 +14,7 @@
     --label_to_fill 0 5001 5002 \\
     --label_not_to_dilate 4 43 10 11 12 49 50 51
 
-Formally: scil_labels_dilate.py
+Formerly: scil_dilate_labels.py
 """
 
 import argparse
diff --git a/scripts/scil_labels_remove.py b/scripts/scil_labels_remove.py
index afcee7da80..0110131f51 100755
--- a/scripts/scil_labels_remove.py
+++ b/scripts/scil_labels_remove.py
@@ -6,7 +6,7 @@
 
 >>> scil_labels_remove.py DKT_labels.nii out_labels.nii.gz -i 5001 5002
 
-Formally: scil_remove_labels.py
+Formerly: scil_remove_labels.py
 """
 
 
diff --git a/scripts/scil_labels_split_volume_by_ids.py b/scripts/scil_labels_split_volume_by_ids.py
index 99dc89b2f6..acc4dc34e7 100755
--- a/scripts/scil_labels_split_volume_by_ids.py
+++ b/scripts/scil_labels_split_volume_by_ids.py
@@ -8,7 +8,7 @@
 
 IMPORTANT: your label image must be of an integer type.
-Formally: scil_split_volume_by_ids.py +Formerly: scil_split_volume_by_ids.py """ import argparse diff --git a/scripts/scil_labels_split_volume_from_lut.py b/scripts/scil_labels_split_volume_from_lut.py index 5f92c59575..5c925f8056 100755 --- a/scripts/scil_labels_split_volume_from_lut.py +++ b/scripts/scil_labels_split_volume_from_lut.py @@ -9,7 +9,7 @@ IMPORTANT: your label image must be of an integer type. -Formally: scil_split_volume_by_labels.py +Formerly: scil_split_volume_by_labels.py """ import argparse diff --git a/scripts/scil_mti_maps_MT.py b/scripts/scil_mti_maps_MT.py index 8b1fc26f0b..19f1e42cd0 100755 --- a/scripts/scil_mti_maps_MT.py +++ b/scripts/scil_mti_maps_MT.py @@ -52,7 +52,7 @@ --in_mtoff path/to/echo*mtoff.nii.gz --in_mton path/to/echo*mton.nii.gz --in_t1w path/to/echo*T1w.nii.gz -Formally: scil_compute_MT_maps.py +Formerly: scil_compute_MT_maps.py """ import argparse diff --git a/scripts/scil_mti_maps_ihMT.py b/scripts/scil_mti_maps_ihMT.py index bcece2c901..2946120993 100755 --- a/scripts/scil_mti_maps_ihMT.py +++ b/scripts/scil_mti_maps_ihMT.py @@ -61,7 +61,7 @@ If you want to use a single echo add --single_echo to the command line and replace the * with the specific number of the echo. -Formally: scil_compute_ihMT_maps.py +Formerly: scil_compute_ihMT_maps.py """ import argparse diff --git a/scripts/scil_qball_metrics.py b/scripts/scil_qball_metrics.py index 08d8da6f41..642e641d20 100755 --- a/scripts/scil_qball_metrics.py +++ b/scripts/scil_qball_metrics.py @@ -14,7 +14,7 @@ See [Descoteaux et al MRM 2007, Aganj et al MRM 2009] for details and [Cote et al MEDIA 2013] for quantitative comparisons. -Formally: scil_compute_qball_metrics.py +Formerly: scil_compute_qball_metrics.py """ import argparse import logging diff --git a/scripts/scil_rgb_convert.py b/scripts/scil_rgb_convert.py index 6fc7ce5802..1078861cc5 100755 --- a/scripts/scil_rgb_convert.py +++ b/scripts/scil_rgb_convert.py @@ -18,7 +18,7 @@ tuple of 3 elements, one for each value (uint8). -Case 2: 4D image where the 4th dimension contains 3 values (uint8). -Formally: scil_convert_rgb.py +Formerly: scil_convert_rgb.py """ import argparse diff --git a/scripts/scil_sh_convert.py b/scripts/scil_sh_convert.py index e847237199..efd3c462b3 100755 --- a/scripts/scil_sh_convert.py +++ b/scripts/scil_sh_convert.py @@ -8,7 +8,7 @@ legacy 'tournier07' bases will be assumed. For more information, see https://dipy.org/documentation/1.4.0./theory/sh_basis/. -Formally: scil_convert_sh_basis.py +Formerly: scil_convert_sh_basis.py """ import argparse diff --git a/scripts/scil_sh_fusion.py b/scripts/scil_sh_fusion.py index 8cecd7d7a1..4f94b48405 100755 --- a/scripts/scil_sh_fusion.py +++ b/scripts/scil_sh_fusion.py @@ -11,7 +11,7 @@ Based on [1] and [2]. -Formally: scil_merge_sh.py +Formerly: scil_merge_sh.py """ import argparse diff --git a/scripts/scil_sh_to_rish.py b/scripts/scil_sh_to_rish.py index d477def3fa..1f729aa734 100755 --- a/scripts/scil_sh_to_rish.py +++ b/scripts/scil_sh_to_rish.py @@ -18,7 +18,7 @@ multiple sites and scanners." MICCAI 2015. https://scholar.harvard.edu/files/hengameh/files/miccai2015.pdf -Formally: scil_compute_rish_from_sh.py +Formerly: scil_compute_rish_from_sh.py """ import argparse diff --git a/scripts/scil_sh_to_sf.py b/scripts/scil_sh_to_sf.py index 978867bda8..a889b1f9a5 100755 --- a/scripts/scil_sh_to_sf.py +++ b/scripts/scil_sh_to_sf.py @@ -10,7 +10,7 @@ to be provided to concatenate the b0 image to the SF, and to generate the new bvals file. 
Otherwise, no .bval file will be created. -Formally: scil_compute_sf_from_sh.py +Formerly: scil_compute_sf_from_sh.py """ import argparse diff --git a/scripts/scil_surface_apply_transform.py b/scripts/scil_surface_apply_transform.py index 2a2aef3a73..6ada084da7 100755 --- a/scripts/scil_surface_apply_transform.py +++ b/scripts/scil_surface_apply_transform.py @@ -16,7 +16,7 @@ The resulting surface should be aligned *b0 world LPS* coordinates (aligned over the b0 in MI-Brain). -Formally: scil_apply_transform_to_surface.py. +Formerly: scil_apply_transform_to_surface.py. """ import argparse diff --git a/scripts/scil_surface_convert.py b/scripts/scil_surface_convert.py index bd5f1f4b1c..de3d4589d5 100755 --- a/scripts/scil_surface_convert.py +++ b/scripts/scil_surface_convert.py @@ -7,7 +7,7 @@ > scil_surface_convert.py surf.vtk converted_surf.ply -Formally: scil_convert_surface.py +Formerly: scil_convert_surface.py """ import argparse import os diff --git a/scripts/scil_surface_flip.py b/scripts/scil_surface_flip.py index a3d275bd85..d468fe7af5 100755 --- a/scripts/scil_surface_flip.py +++ b/scripts/scil_surface_flip.py @@ -12,7 +12,7 @@ > mris_convert --to-scanner lh.white lh.white.vtk > scil_surface_flip.py lh.white.vtk lh_white_lps.vtk x y -Formally: scil_flip_surface.py +Formerly: scil_flip_surface.py """ import argparse diff --git a/scripts/scil_surface_smooth.py b/scripts/scil_surface_smooth.py index b0b58eee5f..66974b4c03 100755 --- a/scripts/scil_surface_smooth.py +++ b/scripts/scil_surface_smooth.py @@ -10,7 +10,7 @@ [10, 100] for a moderate smoothing [100, 1000] for a big smoothing -Formally: scil_smooth_surface.py +Formerly: scil_smooth_surface.py """ import argparse diff --git a/scripts/scil_tracking_local.py b/scripts/scil_tracking_local.py index eb5cb49cb9..03c32fb6aa 100755 --- a/scripts/scil_tracking_local.py +++ b/scripts/scil_tracking_local.py @@ -44,7 +44,7 @@ [1]: Aydogan, D. B., & Shi, Y. (2020). Parallel transport tractography. IEEE transactions on medical imaging, 40(2), 635-647. -Formally: scil_compute_local_tracking.py +Formerly: scil_compute_local_tracking.py """ import argparse diff --git a/scripts/scil_tracking_local_dev.py b/scripts/scil_tracking_local_dev.py index 1867c1f88a..f8c17fb24a 100755 --- a/scripts/scil_tracking_local_dev.py +++ b/scripts/scil_tracking_local_dev.py @@ -40,7 +40,7 @@ Descoteaux, M. (2014). Towards quantitative connectivity analysis: reducing tractography biases. Neuroimage, 98, 266-278. -Formally: scil_compute_local_tracking_dev.py +Formerly: scil_compute_local_tracking_dev.py """ import argparse import logging diff --git a/scripts/scil_tracking_pft.py b/scripts/scil_tracking_pft.py index 33995ee769..dafae15cf3 100755 --- a/scripts/scil_tracking_pft.py +++ b/scripts/scil_tracking_pft.py @@ -23,7 +23,7 @@ All the input nifti files must be in isotropic resolution. -Formally: scil_compute_pft.py +Formerly: scil_compute_pft.py """ import argparse diff --git a/scripts/scil_tracking_pft_maps.py b/scripts/scil_tracking_pft_maps.py index 8032a0f455..224787db62 100755 --- a/scripts/scil_tracking_pft_maps.py +++ b/scripts/scil_tracking_pft_maps.py @@ -10,7 +10,7 @@ (2014). Towards quantitative connectivity analysis: reducing tractography biases. Neuroimage. 
-Formally: scil_compute_maps_for_particle_filter_tracking.py +Formerly: scil_compute_maps_for_particle_filter_tracking.py """ import argparse diff --git a/scripts/scil_tracking_pft_maps_edit.py b/scripts/scil_tracking_pft_maps_edit.py index 0e765d49d2..09163d1b9e 100755 --- a/scripts/scil_tracking_pft_maps_edit.py +++ b/scripts/scil_tracking_pft_maps_edit.py @@ -4,7 +4,7 @@ """ Modify PFT maps to allow PFT tracking in given mask (e.g edema). -Formally: scil_add_tracking_mask_to_pft_maps.py. +Formerly: scil_add_tracking_mask_to_pft_maps.py. """ import argparse diff --git a/scripts/scil_tractogram_apply_transform.py b/scripts/scil_tractogram_apply_transform.py index 2294877bd2..530bfd3ee3 100755 --- a/scripts/scil_tractogram_apply_transform.py +++ b/scripts/scil_tractogram_apply_transform.py @@ -32,7 +32,7 @@ --in_deformation 1Warp.nii.gz --reverse_operation -Formally: scil_apply_transform_to_tractogram.py +Formerly: scil_apply_transform_to_tractogram.py """ import argparse diff --git a/scripts/scil_tractogram_apply_transform_to_hdf5.py b/scripts/scil_tractogram_apply_transform_to_hdf5.py index 6b3d8cc136..fe4352f5ef 100755 --- a/scripts/scil_tractogram_apply_transform_to_hdf5.py +++ b/scripts/scil_tractogram_apply_transform_to_hdf5.py @@ -8,7 +8,7 @@ For more information on how to use the registration script, follow this link: https://scilpy.readthedocs.io/en/latest/documentation/tractogram_registration.html -Formally: scil_apply_transform_to_hdf5.py +Formerly: scil_apply_transform_to_hdf5.py """ import argparse diff --git a/scripts/scil_tractogram_assign_custom_color.py b/scripts/scil_tractogram_assign_custom_color.py index 230c95a233..990b551d22 100755 --- a/scripts/scil_tractogram_assign_custom_color.py +++ b/scripts/scil_tractogram_assign_custom_color.py @@ -35,7 +35,7 @@ The script can also be used to color streamlines according to their length using the --along_profile option. The streamlines must be uniformized. -Formally: scil_assign_custom_color_to_tractogram.py +Formerly: scil_assign_custom_color_to_tractogram.py """ import argparse diff --git a/scripts/scil_tractogram_assign_uniform_color.py b/scripts/scil_tractogram_assign_uniform_color.py index 092bb01811..669d595837 100755 --- a/scripts/scil_tractogram_assign_uniform_color.py +++ b/scripts/scil_tractogram_assign_uniform_color.py @@ -11,7 +11,7 @@ If called with .tck, the output will always be .trk, because data_per_point has no equivalent in tck file. -Formally: scil_assign_uniform_color_to_tractograms.py +Formerly: scil_assign_uniform_color_to_tractograms.py """ import argparse diff --git a/scripts/scil_tractogram_commit.py b/scripts/scil_tractogram_commit.py index dfe567daae..f32987c191 100755 --- a/scripts/scil_tractogram_commit.py +++ b/scripts/scil_tractogram_commit.py @@ -60,7 +60,7 @@ - Inspect the (N)RMSE map and look for peaks or anomalies - Compare the density map before and after (essential tractogram) -Formally: scil_run_commit.py +Formerly: scil_run_commit.py """ import argparse diff --git a/scripts/scil_tractogram_compress.py b/scripts/scil_tractogram_compress.py index 4560666751..95f7061cc6 100755 --- a/scripts/scil_tractogram_compress.py +++ b/scripts/scil_tractogram_compress.py @@ -7,7 +7,7 @@ The compression threshold represents the maximum distance (in mm) to the original position of the point. 
-Formally: scil_compress_streamlines.py +Formerly: scil_compress_streamlines.py """ import argparse diff --git a/scripts/scil_tractogram_convert.py b/scripts/scil_tractogram_convert.py index d0b49ffc06..58fb03c4d1 100755 --- a/scripts/scil_tractogram_convert.py +++ b/scripts/scil_tractogram_convert.py @@ -5,7 +5,7 @@ format standard. TRK file always needs a reference file, a NIFTI, for conversion. The FIB file format is in fact a VTK, MITK Diffusion supports it. -Formally: scil_convert_tractogram.py +Formerly: scil_convert_tractogram.py """ import argparse diff --git a/scripts/scil_tractogram_convert_hdf5_to_trk.py b/scripts/scil_tractogram_convert_hdf5_to_trk.py index b36b7ed97b..4866dc1671 100755 --- a/scripts/scil_tractogram_convert_hdf5_to_trk.py +++ b/scripts/scil_tractogram_convert_hdf5_to_trk.py @@ -19,7 +19,7 @@ |-- [...] |-- LABEL90_LABEL90.trk -Formally: scil_save_connections_from_hdf5.py +Formerly: scil_save_connections_from_hdf5.py """ import argparse diff --git a/scripts/scil_tractogram_count_streamlines.py b/scripts/scil_tractogram_count_streamlines.py index 1b84ca1c3d..2b5ec12edc 100755 --- a/scripts/scil_tractogram_count_streamlines.py +++ b/scripts/scil_tractogram_count_streamlines.py @@ -5,7 +5,7 @@ Return the number of streamlines in a tractogram. Only support trk and tck in order to support the lazy loading from nibabel. -Formally: scil_count_streamlines.py +Formerly: scil_count_streamlines.py """ import argparse diff --git a/scripts/scil_tractogram_cut_streamlines.py b/scripts/scil_tractogram_cut_streamlines.py index cbc27c4830..360f50ca34 100755 --- a/scripts/scil_tractogram_cut_streamlines.py +++ b/scripts/scil_tractogram_cut_streamlines.py @@ -15,7 +15,7 @@ Both scenarios will erase data_per_point and data_per_streamline. -Formally: scil_cut_streamlines.py +Formerly: scil_cut_streamlines.py """ import argparse diff --git a/scripts/scil_tractogram_detect_loops.py b/scripts/scil_tractogram_detect_loops.py index f7d0d99d3d..db3a815e32 100755 --- a/scripts/scil_tractogram_detect_loops.py +++ b/scripts/scil_tractogram_detect_loops.py @@ -18,7 +18,7 @@ QuickBundles based on [Garyfallidis12] Frontiers in Neuroscience, 2012. 
---------------------------------------------------------------------------- -Formally: scil_detect_streamlines_loops.py +Formerly: scil_detect_streamlines_loops.py """ import argparse diff --git a/scripts/scil_tractogram_extract_ushape.py b/scripts/scil_tractogram_extract_ushape.py index a91ea3fabf..0f0b575eab 100755 --- a/scripts/scil_tractogram_extract_ushape.py +++ b/scripts/scil_tractogram_extract_ushape.py @@ -10,7 +10,7 @@ * 1 it defines U-fibers * -1 it defines S-fibers -Formally: scil_extract_ushape.py +Formerly: scil_extract_ushape.py """ import argparse diff --git a/scripts/scil_tractogram_filter_by_anatomy.py b/scripts/scil_tractogram_filter_by_anatomy.py index b1355c4d77..29b89a2651 100755 --- a/scripts/scil_tractogram_filter_by_anatomy.py +++ b/scripts/scil_tractogram_filter_by_anatomy.py @@ -41,7 +41,7 @@ >>> scil_tractogram_filter_by_anatomy.py tractogram.trk wmparc.nii.gz path/to/output/directory --csf_bin csf_bin.nii.gz --ctx_dilation_radius 2 -Formally: scil_filter_streamlines_anatomically.py +Formerly: scil_filter_streamlines_anatomically.py """ import argparse diff --git a/scripts/scil_tractogram_filter_by_length.py b/scripts/scil_tractogram_filter_by_length.py index 6f73d2346c..e4cb643024 100755 --- a/scripts/scil_tractogram_filter_by_length.py +++ b/scripts/scil_tractogram_filter_by_length.py @@ -4,7 +4,7 @@ """ Script to filter streamlines based on their lengths. -Formally: scil_filter_streamlines_by_length.py +Formerly: scil_filter_streamlines_by_length.py """ import argparse diff --git a/scripts/scil_tractogram_filter_by_orientation.py b/scripts/scil_tractogram_filter_by_orientation.py index 3a38da3b34..3c5736e863 100755 --- a/scripts/scil_tractogram_filter_by_orientation.py +++ b/scripts/scil_tractogram_filter_by_orientation.py @@ -15,7 +15,7 @@ Note: we consider that x, y, z are the coordinates of the streamlines; we do not verify if they are aligned with the brain's orientation. -Formally: scil_filter_streamlines_by_orientation.py +Formerly: scil_filter_streamlines_by_orientation.py """ import argparse diff --git a/scripts/scil_tractogram_filter_by_roi.py b/scripts/scil_tractogram_filter_by_roi.py index 58f4e54da2..4a90af52dc 100755 --- a/scripts/scil_tractogram_filter_by_roi.py +++ b/scripts/scil_tractogram_filter_by_roi.py @@ -33,7 +33,7 @@ voxel size (e.g > 2.5mm). The value is in voxel for ROIs and in mm for bounding box. Anisotropic data will affect each direction differently -Formally: scil_filter_tractogram.py +Formerly: scil_filter_tractogram.py """ import argparse diff --git a/scripts/scil_tractogram_fix_trk.py b/scripts/scil_tractogram_fix_trk.py index 6fae8c847d..28b58a79fb 100755 --- a/scripts/scil_tractogram_fix_trk.py +++ b/scripts/scil_tractogram_fix_trk.py @@ -39,7 +39,7 @@ evolve quickly and results may vary depending on the data itself as well as DSI-studio/Startrack version. -Formally: scil_fix_dsi_studio_trk.py +Formerly: scil_fix_dsi_studio_trk.py """ import argparse diff --git a/scripts/scil_tractogram_flip.py b/scripts/scil_tractogram_flip.py index 1f9eb694d0..d0bb7d43af 100755 --- a/scripts/scil_tractogram_flip.py +++ b/scripts/scil_tractogram_flip.py @@ -8,7 +8,7 @@ It's better to fix the real tools than to force flipping streamlines to have them fit in the tools. 
-Formally: scil_flip_streamlines.py
+Formerly: scil_flip_streamlines.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_math.py b/scripts/scil_tractogram_math.py
index 2a009fbb35..3e96e6dd9c 100755
--- a/scripts/scil_tractogram_math.py
+++ b/scripts/scil_tractogram_math.py
@@ -37,7 +37,7 @@
 --no_metadata to strip the metadata from the output. Or --fake_metadata to
 initialize dummy metadata in the file missing them.
 
-Formally: scil_streamlines_math.py
+Formerly: scil_streamlines_math.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_qbx.py b/scripts/scil_tractogram_qbx.py
index f19628cf37..dfbceb71fb 100755
--- a/scripts/scil_tractogram_qbx.py
+++ b/scripts/scil_tractogram_qbx.py
@@ -5,7 +5,7 @@
 Compute clusters using QuickBundlesX and save them separately.
 We cannot know the number of clusters in advance.
 
-Formally: scil_compute_qbx.py
+Formerly: scil_compute_qbx.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_register.py b/scripts/scil_tractogram_register.py
index 2f308e0f54..2feaf18469 100755
--- a/scripts/scil_tractogram_register.py
+++ b/scripts/scil_tractogram_register.py
@@ -9,7 +9,7 @@
 For more information on how to use the various registration scripts
 see the doc/tractogram_registration.md readme file
 
-Formally: scil_register_tractogram.py
+Formerly: scil_register_tractogram.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_remove_invalid.py b/scripts/scil_tractogram_remove_invalid.py
index 8ddc8d676c..3942d9b3e5 100755
--- a/scripts/scil_tractogram_remove_invalid.py
+++ b/scripts/scil_tractogram_remove_invalid.py
@@ -9,7 +9,7 @@
 The --cut_invalid option will cut streamlines so that their longest segment
 are within the bounding box
 
-Formally: scil_remove_invalid_streamlines.py
+Formerly: scil_remove_invalid_streamlines.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_resample.py b/scripts/scil_tractogram_resample.py
index 94d1cdb2c7..f243e930df 100755
--- a/scripts/scil_tractogram_resample.py
+++ b/scripts/scil_tractogram_resample.py
@@ -26,7 +26,7 @@
     --point_wise_std 0.5 --spline 5 10 --keep_invalid_streamlines
 $ scil_visualize_bundles.py output.trk --local_coloring --width=0.1
 
-Formally: scil_resample_tractogram.py
+Formerly: scil_resample_tractogram.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_resample_nb_points.py b/scripts/scil_tractogram_resample_nb_points.py
index 46ad5b7cb4..6b18268de8 100755
--- a/scripts/scil_tractogram_resample_nb_points.py
+++ b/scripts/scil_tractogram_resample_nb_points.py
@@ -5,7 +5,7 @@
 Script to resample a set of streamlines to either a new number of points per
 streamline or to a fixed step size. WARNING: data_per_point is not carried.
 
-Formally: scil_resample_streamlines.py
+Formerly: scil_resample_streamlines.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_seed_density_map.py b/scripts/scil_tractogram_seed_density_map.py
index 4dbdec5574..205b1181f1 100755
--- a/scripts/scil_tractogram_seed_density_map.py
+++ b/scripts/scil_tractogram_seed_density_map.py
@@ -4,7 +4,7 @@
 """
 Compute a density map of seeds saved in .trk file.
-Formally: scil_compute_seed_density_map.py
+Formerly: scil_compute_seed_density_map.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_segment_bundles.py b/scripts/scil_tractogram_segment_bundles.py
index b7bb9efbe2..84677a27e9 100755
--- a/scripts/scil_tractogram_segment_bundles.py
+++ b/scripts/scil_tractogram_segment_bundles.py
@@ -26,7 +26,7 @@
 This is important because many instances of data structures are initialized
 in parallel and can lead to a RAM overflow.
 
-Formally: scil_recognize_multi_bundles.py
+Formerly: scil_recognize_multi_bundles.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_segment_bundles_for_connectivity.py b/scripts/scil_tractogram_segment_bundles_for_connectivity.py
index f0e9bdad18..73de11204c 100755
--- a/scripts/scil_tractogram_segment_bundles_for_connectivity.py
+++ b/scripts/scil_tractogram_segment_bundles_for_connectivity.py
@@ -27,7 +27,7 @@
 - 30 minutes with full post-processing, only saving final bundles.
 - 60 minutes with full post-processing, saving all possible files.
 
-Formally: scil_decompose_connectivity.py
+Formerly: scil_decompose_connectivity.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_segment_one_bundles.py b/scripts/scil_tractogram_segment_one_bundles.py
index 02abf09b86..7fd4274eff 100755
--- a/scripts/scil_tractogram_segment_one_bundles.py
+++ b/scripts/scil_tractogram_segment_one_bundles.py
@@ -13,7 +13,7 @@
 warning in both cases it means the transformation is very close to identity
 and both 'direction' will work.
 
-Formally: scil_recognize_single_bundles.py
+Formerly: scil_recognize_single_bundles.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_shuffle.py b/scripts/scil_tractogram_shuffle.py
index ca9bee335b..3db893fd10 100755
--- a/scripts/scil_tractogram_shuffle.py
+++ b/scripts/scil_tractogram_shuffle.py
@@ -4,7 +4,7 @@
 """
 Shuffle the ordering of streamlines.
 
-Formally: scil_shuffle_streamlines.py
+Formerly: scil_shuffle_streamlines.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_smooth.py b/scripts/scil_tractogram_smooth.py
index 1c4b39d53d..c67451640b 100755
--- a/scripts/scil_tractogram_smooth.py
+++ b/scripts/scil_tractogram_smooth.py
@@ -22,7 +22,7 @@
 will create crazy streamlines that could end up out of the bounding box.
 - data_per_point will be lost.
 
-Formally: scil_smooth_streamlines.py
+Formerly: scil_smooth_streamlines.py
 """
 
 import argparse
diff --git a/scripts/scil_tractogram_split.py b/scripts/scil_tractogram_split.py
index 5cc4ca4ed8..9a7cf434b5 100755
--- a/scripts/scil_tractogram_split.py
+++ b/scripts/scil_tractogram_split.py
@@ -11,7 +11,7 @@
   on).
 - randomly, but per Quickbundles clusters.
 
-Formally: scil_split_tractogram.py
+Formerly: scil_split_tractogram.py
 """
 
 import argparse
 import logging
diff --git a/scripts/scil_tractogram_uniformize_endpoints.py b/scripts/scil_tractogram_uniformize_endpoints.py
index 8a320d355a..8590fd2685 100755
--- a/scripts/scil_tractogram_uniformize_endpoints.py
+++ b/scripts/scil_tractogram_uniformize_endpoints.py
@@ -15,7 +15,7 @@
 axis. The target mask can be a binary mask or an atlas. If an atlas is used,
 labels are expected in the form of --target atlas.nii.gz 2 3 5:7.
-Formally: scil_uniformize_streamlines_endpoints.py +Formerly: scil_uniformize_streamlines_endpoints.py """ import argparse diff --git a/scripts/scil_volume_apply_transform.py b/scripts/scil_volume_apply_transform.py index dfc391837c..a09748a443 100755 --- a/scripts/scil_volume_apply_transform.py +++ b/scripts/scil_volume_apply_transform.py @@ -7,7 +7,7 @@ For more information on how to use the registration script, follow this link: https://scilpy.readthedocs.io/en/latest/documentation/tractogram_registration.html -Formally: scil_apply_transform_to_image.py. +Formerly: scil_apply_transform_to_image.py. """ import argparse diff --git a/scripts/scil_volume_count_non_zero_voxels.py b/scripts/scil_volume_count_non_zero_voxels.py index 7430c42b01..fc6448846d 100755 --- a/scripts/scil_volume_count_non_zero_voxels.py +++ b/scripts/scil_volume_count_non_zero_voxels.py @@ -9,7 +9,7 @@ This means that if there is at least one non-zero voxel in the 4th dimension, this voxel of the 3D volume will be considered as non-zero. -Formally: scil_count_non_zero_voxels.py +Formerly: scil_count_non_zero_voxels.py """ import argparse diff --git a/scripts/scil_volume_crop.py b/scripts/scil_volume_crop.py index a93cf14ee4..0a80f61439 100755 --- a/scripts/scil_volume_crop.py +++ b/scripts/scil_volume_crop.py @@ -10,7 +10,7 @@ it's looking for non-zero data. Therefore, you should validate the results on other types of images that haven't been masked. -Formally: scil_crop_volume.py +Formerly: scil_crop_volume.py """ import argparse diff --git a/scripts/scil_volume_flip.py b/scripts/scil_volume_flip.py index 49562d378b..1c2c0592ad 100755 --- a/scripts/scil_volume_flip.py +++ b/scripts/scil_volume_flip.py @@ -3,7 +3,7 @@ """ Flip the volume according to the specified axis. -Formally: scil_flip_volume.py +Formerly: scil_flip_volume.py """ import argparse diff --git a/scripts/scil_volume_math.py b/scripts/scil_volume_math.py index c130d0d7f6..af2448a4cd 100755 --- a/scripts/scil_volume_math.py +++ b/scripts/scil_volume_math.py @@ -12,7 +12,7 @@ parameters instead of images. > scil_volume_math.py multiplication img.nii.gz 10 mult_10.nii.gz -Formally: scil_image_math.py +Formerly: scil_image_math.py """ import argparse diff --git a/scripts/scil_volume_remove_outliers_ransac.py b/scripts/scil_volume_remove_outliers_ransac.py index c6dafcc24e..72f9ae1904 100755 --- a/scripts/scil_volume_remove_outliers_ransac.py +++ b/scripts/scil_volume_remove_outliers_ransac.py @@ -7,7 +7,7 @@ NOTE: Current default parameters are tuned for ad/md/rd images only. -Formally: scil_remove_outliers_ransac.py +Formerly: scil_remove_outliers_ransac.py """ import argparse diff --git a/scripts/scil_volume_resample.py b/scripts/scil_volume_resample.py index 61ded50fc7..a037206dcb 100755 --- a/scripts/scil_volume_resample.py +++ b/scripts/scil_volume_resample.py @@ -5,7 +5,7 @@ Script to resample a dataset to match the resolution of another reference dataset or to the resolution specified as in argument. 
-Formally: scil_resample_volume.py +Formerly: scil_resample_volume.py """ import argparse diff --git a/scripts/scil_volume_reshape_to_reference.py b/scripts/scil_volume_reshape_to_reference.py index fc7e220425..8375acc6ad 100755 --- a/scripts/scil_volume_reshape_to_reference.py +++ b/scripts/scil_volume_reshape_to_reference.py @@ -9,7 +9,7 @@ >>> scil_volume_reshape_to_reference.py wmparc.mgz t1.nii.gz wmparc_t1.nii.gz\\ --interpolation nearest -Formally: scil_reshape_to_reference.py +Formerly: scil_reshape_to_reference.py """ import argparse diff --git a/scripts/scil_volume_stats_in_labels.py b/scripts/scil_volume_stats_in_labels.py index ee4cc45889..981c412bf7 100755 --- a/scripts/scil_volume_stats_in_labels.py +++ b/scripts/scil_volume_stats_in_labels.py @@ -6,7 +6,7 @@ Here we want to estimate the seeding attribution to cortical area affected by the bundle -Formally: scil_compute_seed_by_labels.py +Formerly: scil_compute_seed_by_labels.py """ import argparse From d49b61bb1f3f7e645b7f76d5636f931c97590963 Mon Sep 17 00:00:00 2001 From: frheault Date: Thu, 14 Dec 2023 13:57:23 -0500 Subject: [PATCH 20/63] delete duplicate --- scripts/legacy/scil_convert_json_to_xlsx.py | 21 + scripts/legacy/scil_harmonize_json.py | 21 + scripts/legacy/scil_merge_json.py | 21 + scripts/scil_json_convert_to_xlsx.py | 486 -------------------- scripts/scil_json_harmonize.py | 82 ---- scripts/scil_json_merge.py | 96 ---- 6 files changed, 63 insertions(+), 664 deletions(-) create mode 100755 scripts/legacy/scil_convert_json_to_xlsx.py create mode 100755 scripts/legacy/scil_harmonize_json.py create mode 100755 scripts/legacy/scil_merge_json.py delete mode 100755 scripts/scil_json_convert_to_xlsx.py delete mode 100755 scripts/scil_json_harmonize.py delete mode 100755 scripts/scil_json_merge.py diff --git a/scripts/legacy/scil_convert_json_to_xlsx.py b/scripts/legacy/scil_convert_json_to_xlsx.py new file mode 100755 index 0000000000..7e0b101c6d --- /dev/null +++ b/scripts/legacy/scil_convert_json_to_xlsx.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_json_convert_entries_to_xlsx import main as new_main + + +DEPRECATION_MSG = """ +This script has been renamed scil_json_convert_entries_to_xlsx.py. +Please change your existing pipelines accordingly. +""" + + +@deprecate_script("scil_convert_json_to_xlsx.py", + DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() diff --git a/scripts/legacy/scil_harmonize_json.py b/scripts/legacy/scil_harmonize_json.py new file mode 100755 index 0000000000..b6542a87a8 --- /dev/null +++ b/scripts/legacy/scil_harmonize_json.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_json_harmonize_entries import main as new_main + + +DEPRECATION_MSG = """ +This script has been renamed scil_json_harmonize_entries.py. +Please change your existing pipelines accordingly. 
+""" + + +@deprecate_script("scil_merge_json.py", + DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() diff --git a/scripts/legacy/scil_merge_json.py b/scripts/legacy/scil_merge_json.py new file mode 100755 index 0000000000..bdc5efa911 --- /dev/null +++ b/scripts/legacy/scil_merge_json.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_json_merge_entries import main as new_main + + +DEPRECATION_MSG = """ +This script has been renamed scil_json_merge_entries.py. +Please change your existing pipelines accordingly. +""" + + +@deprecate_script("scil_json_merge_entries.py", + DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() diff --git a/scripts/scil_json_convert_to_xlsx.py b/scripts/scil_json_convert_to_xlsx.py deleted file mode 100755 index 879e1ccd2d..0000000000 --- a/scripts/scil_json_convert_to_xlsx.py +++ /dev/null @@ -1,486 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -""" Convert a final aggregated json file to an Excel spreadsheet. -Typically used during the tractometry pipeline. -""" - -import argparse -import json - -import numpy as np -import pandas as pd - -from scilpy.io.utils import (add_overwrite_arg, - assert_inputs_exist, assert_outputs_exist) - - -def _get_all_bundle_names(stats): - bnames = set() - - for bundles in iter(stats.values()): - bnames |= set(bundles.keys()) - - return list(bnames) - - -def _are_all_elements_scalars(bundle_stat): - for v in iter(bundle_stat.values()): - if type(v) is not int and type(v) is not float: - return False - - return True - - -def _get_metrics_names(stats): - mnames = set() - - for bundles in iter(stats.values()): - for val in iter(bundles.values()): - mnames |= set(val.keys()) - return mnames - - -def _get_labels(stats): - labels = set() - - for bundles in iter(stats.values()): - for lab in iter(bundles.values()): - if type(lab[list(lab.keys())[0]]) is dict: - for vals in iter(lab.values()): - labels |= set(vals.keys()) - else: - labels |= set(lab.keys()) - - return list(labels) - - -def _find_stat_name(stats): - first_sub_stats = stats[list(stats.keys())[0]] - first_bundle_stats = first_sub_stats[list(first_sub_stats.keys())[0]] - - return list(first_bundle_stats.keys())[0] - - -def _get_stats_parse_function(stats, stats_over_population): - first_sub_stats = stats[list(stats.keys())[0]] - first_bundle_stats = first_sub_stats[list(first_sub_stats.keys())[0]] - first_bundle_substat = first_bundle_stats[list( - first_bundle_stats.keys())[0]] - - if len(first_bundle_stats.keys()) == 1 and\ - _are_all_elements_scalars(first_bundle_stats): - return _parse_scalar_stats - elif len(first_bundle_stats.keys()) == 4 and \ - set(first_bundle_stats.keys()) == \ - set(['lesion_total_vol', 'lesion_avg_vol', 'lesion_std_vol', - 'lesion_count']): - return _parse_lesion - elif len(first_bundle_stats.keys()) == 4 and \ - set(first_bundle_stats.keys()) == \ - set(['min_length', 'max_length', 'mean_length', 'std_length']): - return _parse_lengths - elif type(first_bundle_substat) is dict: - sub_keys = list(first_bundle_substat.keys()) - if set(sub_keys) == set(['mean', 'std']): - if stats_over_population: - return _parse_per_label_population_stats - else: - return _parse_scalar_meanstd - elif type(first_bundle_substat[sub_keys[0]]) is dict: - return _parse_per_point_meanstd - elif _are_all_elements_scalars(first_bundle_substat): - return _parse_per_label_scalar - - raise 
IOError('Unable to recognize stats type!') - - -def _write_dataframes(dataframes, df_names, output_path): - with pd.ExcelWriter(output_path) as writer: - for df, df_name in zip(dataframes, df_names): - df.to_excel(writer, sheet_name=df_name) - - -def _parse_scalar_stats(stats, subs, bundles): - stat_name = _find_stat_name(stats) - - nb_subs = len(subs) - nb_bundles = len(bundles) - - stats_array = np.full((nb_subs, nb_bundles), np.NaN) - - for sub_id, sub_name in enumerate(subs): - for bundle_id, bundle_name in enumerate(bundles): - b_stat = stats[sub_name].get(bundle_name) - - if b_stat is not None: - stats_array[sub_id, bundle_id] = b_stat[stat_name] - - dataframes = [pd.DataFrame(data=stats_array, - index=subs, - columns=bundles)] - df_names = [stat_name] - - return dataframes, df_names - - -def _parse_scalar_meanstd(stats, subs, bundles): - metric_names = _get_metrics_names(stats) - - nb_subs = len(subs) - nb_bundles = len(bundles) - nb_metrics = len(metric_names) - - means = np.full((nb_subs, nb_bundles, nb_metrics), np.NaN) - stddev = np.full((nb_subs, nb_bundles, nb_metrics), np.NaN) - - for sub_id, sub_name in enumerate(subs): - for bundle_id, bundle_name in enumerate(bundles): - for metric_id, metric_name in enumerate(metric_names): - b_stat = stats[sub_name].get(bundle_name) - - if b_stat is not None: - m_stat = b_stat.get(metric_name) - - if m_stat is not None: - means[sub_id, bundle_id, metric_id] = m_stat['mean'] - stddev[sub_id, bundle_id, metric_id] = m_stat['std'] - - dataframes = [] - df_names = [] - - for metric_id, metric_name in enumerate(metric_names): - dataframes.append(pd.DataFrame(data=means[:, :, metric_id], - index=subs, columns=bundles)) - df_names.append(metric_name + "_mean") - - dataframes.append(pd.DataFrame(data=stddev[:, :, metric_id], - index=subs, columns=bundles)) - df_names.append(metric_name + "_std") - - return dataframes, df_names - - -def _parse_scalar_lesions(stats, subs, bundles): - metric_names = _get_metrics_names(stats) - nb_subs = len(subs) - nb_bundles = len(bundles) - nb_metrics = len(metric_names) - - means = np.full((nb_subs, nb_bundles, nb_metrics), np.NaN) - stddev = np.full((nb_subs, nb_bundles, nb_metrics), np.NaN) - - for sub_id, sub_name in enumerate(subs): - for bundle_id, bundle_name in enumerate(bundles): - for metric_id, metric_name in enumerate(metric_names): - b_stat = stats[sub_name].get(bundle_name) - - if b_stat is not None: - m_stat = b_stat.get(metric_name) - - if m_stat is not None: - means[sub_id, bundle_id, metric_id] = m_stat['mean'] - stddev[sub_id, bundle_id, metric_id] = m_stat['std'] - - dataframes = [] - df_names = [] - - for metric_id, metric_name in enumerate(metric_names): - dataframes.append(pd.DataFrame(data=means[:, :, metric_id], - index=subs, columns=bundles)) - df_names.append(metric_name + "_mean") - - dataframes.append(pd.DataFrame(data=stddev[:, :, metric_id], - index=subs, columns=bundles)) - df_names.append(metric_name + "_std") - - return dataframes, df_names - - -def _parse_lengths(stats, subs, bundles): - nb_subs = len(subs) - nb_bundles = len(bundles) - - min_lengths = np.full((nb_subs, nb_bundles), np.NaN) - max_lengths = np.full((nb_subs, nb_bundles), np.NaN) - mean_lengths = np.full((nb_subs, nb_bundles), np.NaN) - std_lengths = np.full((nb_subs, nb_bundles), np.NaN) - - for sub_id, sub_name in enumerate(subs): - for bundle_id, bundle_name in enumerate(bundles): - b_stat = stats[sub_name].get(bundle_name) - - if b_stat is not None: - min_lengths[sub_id, bundle_id] = b_stat['min_length'] - 
max_lengths[sub_id, bundle_id] = b_stat['max_length'] - mean_lengths[sub_id, bundle_id] = b_stat['mean_length'] - std_lengths[sub_id, bundle_id] = b_stat['std_length'] - - dataframes = [pd.DataFrame(data=min_lengths, - index=subs, - columns=bundles), - pd.DataFrame(data=max_lengths, - index=subs, - columns=bundles), - pd.DataFrame(data=mean_lengths, - index=subs, - columns=bundles), - pd.DataFrame(data=std_lengths, - index=subs, - columns=bundles)] - - df_names = ["min_length", "max_length", "mean_length", "std_length"] - - return dataframes, df_names - - -def _parse_lesion(stats, subs, bundles): - nb_subs = len(subs) - nb_bundles = len(bundles) - - total_volume = np.full((nb_subs, nb_bundles), np.NaN) - avg_volume = np.full((nb_subs, nb_bundles), np.NaN) - std_volume = np.full((nb_subs, nb_bundles), np.NaN) - lesion_count = np.full((nb_subs, nb_bundles), np.NaN) - - for sub_id, sub_name in enumerate(subs): - for bundle_id, bundle_name in enumerate(bundles): - b_stat = stats[sub_name].get(bundle_name) - - if b_stat is not None: - total_volume[sub_id, bundle_id] = b_stat['lesion_total_vol'] - avg_volume[sub_id, bundle_id] = b_stat['lesion_avg_vol'] - std_volume[sub_id, bundle_id] = b_stat['lesion_std_vol'] - lesion_count[sub_id, bundle_id] = b_stat['lesion_count'] - - dataframes = [pd.DataFrame(data=total_volume, - index=subs, - columns=bundles), - pd.DataFrame(data=avg_volume, - index=subs, - columns=bundles), - pd.DataFrame(data=std_volume, - index=subs, - columns=bundles), - pd.DataFrame(data=lesion_count, - index=subs, - columns=bundles)] - - df_names = ["lesion_total_vol", "lesion_avg_vol", - "lesion_std_vol", "lesion_count"] - - return dataframes, df_names - - -def _parse_per_label_scalar(stats, subs, bundles): - labels = _get_labels(stats) - labels.sort() - - nb_subs = len(subs) - nb_bundles = len(bundles) - nb_labels = len(labels) - - stats_array = np.full((nb_subs, nb_bundles * nb_labels), np.NaN) - column_names = [] - for bundle_name in bundles: - column_names.extend(["{}_{}".format(bundle_name, label) - for label in labels]) - - stat_name = _find_stat_name(stats) - for sub_id, sub_name in enumerate(subs): - for bundle_id, bundle_name in enumerate(bundles): - - b_stat = stats[sub_name].get(bundle_name) - - if b_stat is not None: - m_stat = b_stat.get(stat_name) - - if m_stat is not None: - for label_id, label in enumerate(labels): - label_stat = m_stat.get(label) - - if label_stat is not None: - stats_array[sub_id, - bundle_id * len(labels) + label_id] =\ - label_stat - - dataframes = [pd.DataFrame(data=stats_array, - index=subs, - columns=column_names)] - df_names = ['{}_per_label'.format(stat_name)] - - return dataframes, df_names - - -def _parse_per_point_meanstd(stats, subs, bundles): - labels = _get_labels(stats) - labels.sort() - - metric_names = _get_metrics_names(stats) - - nb_subs = len(subs) - nb_bundles = len(bundles) - nb_labels = len(labels) - nb_metrics = len(metric_names) - - means = np.full((nb_subs, nb_bundles * nb_labels, nb_metrics), np.NaN) - stddev = np.full((nb_subs, nb_bundles * nb_labels, nb_metrics), np.NaN) - - for sub_id, sub_name in enumerate(subs): - for bundle_id, bundle_name in enumerate(bundles): - b_stat = stats[sub_name].get(bundle_name) - - if b_stat is not None: - for metric_id, metric_name in enumerate(metric_names): - m_stat = b_stat.get(metric_name) - - if m_stat is not None: - for label_id, label in enumerate(labels): - label_stat = m_stat.get(label) - - if label_stat is not None: - means[sub_id, - bundle_id * len(labels) + label_id, - 
metric_id] =\ - label_stat['mean'] - stddev[sub_id, - bundle_id * len(labels) + label_id, - metric_id] =\ - label_stat['std'] - - column_names = [] - for bundle_name in bundles: - column_names.extend(["{}_{}".format(bundle_name, label) - for label in labels]) - - dataframes = [] - df_names = [] - for metric_id, metric_name in enumerate(metric_names): - dataframes.append(pd.DataFrame(data=means[:, :, metric_id], - index=subs, columns=column_names)) - df_names.append(metric_name + "_mean") - - dataframes.append(pd.DataFrame(data=stddev[:, :, metric_id], - index=subs, columns=column_names)) - df_names.append(metric_name + "_std") - - return dataframes, df_names - - -def _parse_per_label_population_stats(stats, bundles, metrics): - labels = list(stats[bundles[0]][metrics[0]].keys()) - labels.sort() - - nb_bundles = len(bundles) - nb_labels = len(labels) - nb_metrics = len(metrics) - - means = np.full((nb_bundles, nb_labels, nb_metrics), np.NaN) - stddev = np.full((nb_bundles, nb_labels, nb_metrics), np.NaN) - - for bundle_id, bundle_name in enumerate(bundles): - b_stat = stats.get(bundle_name) - - if b_stat is not None: - for metric_id, metric_name in enumerate(metrics): - m_stat = b_stat.get(metric_name) - - if m_stat is not None: - for label_id, label in enumerate(labels): - label_stat = m_stat.get(label) - - if label_stat is not None: - means[bundle_id, label_id, metric_id] =\ - np.average(label_stat['mean']) - stddev[bundle_id, label_id, metric_id] =\ - np.average(label_stat['std']) - - dataframes = [] - df_names = [] - for metric_id, metric_name in enumerate(metrics): - dataframes.append(pd.DataFrame(data=np.array(means[:, :, metric_id]), - index=bundles, - columns=labels)) - df_names.append(metric_name + "_mean") - - dataframes.append(pd.DataFrame(data=np.array(stddev[:, :, metric_id]), - index=bundles, - columns=labels)) - df_names.append(metric_name + "_std") - - return dataframes, df_names - - -def _create_xlsx_from_json(json_path, xlsx_path, - sort_subs=True, sort_bundles=True, - ignored_bundles_fpath=None, - stats_over_population=False): - with open(json_path, 'r') as json_file: - stats = json.load(json_file) - - subs = list(stats.keys()) - if sort_subs: - subs.sort() - - bundle_names = _get_all_bundle_names(stats) - if sort_bundles: - bundle_names.sort() - - if ignored_bundles_fpath is not None: - with open(ignored_bundles_fpath, 'r') as f: - bundles_to_ignore = [l.strip() for l in f] - bundle_names = filter(lambda name: name not in bundles_to_ignore, - bundle_names) - - cur_stats_func = _get_stats_parse_function(stats, stats_over_population) - - dataframes, df_names = cur_stats_func(stats, subs, bundle_names) - - if len(dataframes): - _write_dataframes(dataframes, df_names, xlsx_path) - - -def _build_arg_parser(): - p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, - description=__doc__) - - p.add_argument('in_json', - help='File containing the json stats (.json).') - - p.add_argument('out_xlsx', - help='Output Excel file for the stats (.xlsx).') - - p.add_argument('--no_sort_subs', action='store_false', - help='If set, subjects won\'t be sorted alphabetically.') - - p.add_argument('--no_sort_bundles', action='store_false', - help='If set, bundles won\'t be sorted alphabetically.') - p.add_argument('--ignore_bundles', metavar='FILE', - help='Path to a text file containing a list of bundles ' - 'to ignore (.txt).\nOne bundle, corresponding to keys ' - 'in the json, per line.') - p.add_argument('--stats_over_population', action='store_true', - help='If set, 
consider the input stats to be over an ' - 'entire population and not subject-based.') - - add_overwrite_arg(p) - - return p - - -def main(): - parser = _build_arg_parser() - args = parser.parse_args() - - assert_inputs_exist(parser, args.in_json) - assert_outputs_exist(parser, args, args.out_xlsx) - - _create_xlsx_from_json(args.in_json, args.out_xlsx, - sort_subs=args.no_sort_subs, - sort_bundles=args.no_sort_bundles, - ignored_bundles_fpath=args.ignore_bundles, - stats_over_population=args.stats_over_population) - - -if __name__ == "__main__": - main() diff --git a/scripts/scil_json_harmonize.py b/scripts/scil_json_harmonize.py deleted file mode 100755 index a735089857..0000000000 --- a/scripts/scil_json_harmonize.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -""" This script will harmonize a json file by adding missing keys and values -that differs between the different layers of the dictionary. - -This is use only (for now) in Aggregate_All_* portion of tractometry-flow, -to counter the problem of missing bundles/metrics/lesions between subjects. - -The most common use case is when specific subjects have missing bundles -which will cause a panda array to be incomplete, and thus crash. Finding out -the union of all bundles/metrics/lesions will allow to create a complete json -(but with NaN for missing values). -""" - -import argparse -from copy import deepcopy -import json - -from deepdiff import DeepDiff - -from scilpy.io.utils import (assert_inputs_exist, - add_json_args, - assert_outputs_exist, - add_overwrite_arg, - add_verbose_arg) -from scilpy.utils.util import recursive_update, recursive_print - - -def _build_arg_parser(): - p = argparse.ArgumentParser(description=__doc__, - formatter_class=argparse.RawTextHelpFormatter) - - p.add_argument('in_file', - help='Input file (json).') - p.add_argument('out_file', - help='Output file (json).') - add_json_args(p) - add_overwrite_arg(p) - add_verbose_arg(p) - - return p - - -def main(): - parser = _build_arg_parser() - args = parser.parse_args() - - assert_inputs_exist(parser, args.in_file) - assert_outputs_exist(parser, args, args.out_file) - - with open(args.in_file) as f: - data = json.load(f) - data_old = deepcopy(data) - - # Generate the reference (full) dictionary. Skip level 0, complete all - # levels, but write NaN at last level (leaf). - new = {} - for key in data.keys(): - new = recursive_update(new, data[key], from_existing=False) - - # Harmonize the original dictionary, missing keys are added, when a - # is missing NaN will be stored - for key in data.keys(): - data[key] = recursive_update(data[key], new, from_existing=True) - - if args.verbose: - print('Layered keys of the dictionary:') - recursive_print(data) - print() - - dd = DeepDiff(data, data_old, ignore_order=True) - if 'dictionary_item_removed' in dd: - print('Missing keys that were harmonized:') - print(dd['dictionary_item_removed']) - - with open(args.out_file, "w") as f: - json.dump(data, f, indent=args.indent, sort_keys=args.sort_keys) - - -if __name__ == "__main__": - main() diff --git a/scripts/scil_json_merge.py b/scripts/scil_json_merge.py deleted file mode 100755 index 7f7e624f57..0000000000 --- a/scripts/scil_json_merge.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -""" Merge multiple json file into a single one. -Typically used during the tractometry pipeline. - -Without option it will simply merge all entries at the top level, the top -level must not have any conflicting keys. 
- ---keep_separate option will add a parent for each file, its basename will -become the key. - ---no_list option will merge all entries at the top level, if there is a -conflict the lowest level will be extended with the new values (if list) or -added (if value) - ---add_parent_key option will add a parent key before merging all entries. - ---remove_parent_key option will remove the parent key before merging all -entries. - ---recursive option will merge all entries (scalar) at the lowest layers as a -list. - ---average_last_layer option will average all entries (scalar) at the lowest -layers, but instead of creating a list it creates a mean/std level. -""" - -import argparse -import json -import os - -from scilpy.io.utils import (add_overwrite_arg, add_json_args, - assert_inputs_exist, assert_outputs_exist) -from scilpy.tractanalysis.json_utils import merge_dict, average_dict - - -def _build_arg_parser(): - p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, - description=__doc__) - - p.add_argument('in_json', nargs='+', - help='List of json files to merge (.json).') - p.add_argument('out_json', - help='Output json file (.json).') - - p.add_argument('--keep_separate', action='store_true', - help='Merge entries as separate keys based on filename.') - p.add_argument('--no_list', action='store_true', - help='Merge entries knowing there is no conflict.') - p.add_argument('--add_parent_key', - help='Merge all entries under a single parent.') - p.add_argument('--remove_parent_key', action='store_true', - help='Merge ignoring parent key (e.g for population).') - p.add_argument('--recursive', action='store_true', - help='Merge all entries at the lowest layers.') - p.add_argument('--average_last_layer', action='store_true', - help='Average all entries at the lowest layers.') - add_json_args(p) - add_overwrite_arg(p) - - return p - - -def main(): - parser = _build_arg_parser() - args = parser.parse_args() - - assert_inputs_exist(parser, args.in_json) - assert_outputs_exist(parser, args, args.out_json) - - out_dict = {} - for in_file in args.in_json: - with open(in_file, 'r') as json_file: - in_dict = json.load(json_file) - if args.remove_parent_key: - in_dict = list(in_dict.values())[0] - if args.keep_separate: - out_dict[os.path.splitext(in_file)[0]] = in_dict - else: - out_dict = merge_dict(out_dict, in_dict, - no_list=args.no_list, - recursive=args.recursive) - - if args.average_last_layer: - out_dict = average_dict(out_dict) - - with open(args.out_json, 'w') as outfile: - if args.add_parent_key: - out_dict = {args.add_parent_key: out_dict} - json.dump(out_dict, outfile, - indent=args.indent, sort_keys=args.sort_keys) - - -if __name__ == "__main__": - main() From 1998f3d3898a8e9710a140ef0efbe846be07aafc Mon Sep 17 00:00:00 2001 From: CHrlS98 Date: Thu, 14 Dec 2023 14:03:18 -0500 Subject: [PATCH 21/63] Rename lobe_specific to bingham --- ...py => bingham_metric_along_streamlines.py} | 28 ++++++------- ...scil_compute_lobe_specific_fodf_metrics.py | 4 +- ...ute_mean_fixel_lobe_metric_from_bundles.py | 6 +-- ...ham_metrics.py => scil_bingham_metrics.py} | 8 ++-- ... scil_bundle_mean_fixel_bingham_metric.py} | 42 +++++++++---------- ...ham_metrics.py => test_bingham_metrics.py} | 8 ++-- ... 
test_bundle_mean_fixel_bingham_metric.py} | 4 +- 7 files changed, 48 insertions(+), 52 deletions(-) rename scilpy/tractanalysis/{lobe_metrics_along_streamlines.py => bingham_metric_along_streamlines.py} (79%) rename scripts/{scil_fodf_bingham_metrics.py => scil_bingham_metrics.py} (94%) rename scripts/{scil_bundle_mean_fixel_lobe_metric.py => scil_bundle_mean_fixel_bingham_metric.py} (62%) rename scripts/tests/{test_fodf_bingham_metrics.py => test_bingham_metrics.py} (86%) rename scripts/tests/{test_bundle_mean_fixel_lobe_metric.py => test_bundle_mean_fixel_bingham_metric.py} (88%) diff --git a/scilpy/tractanalysis/lobe_metrics_along_streamlines.py b/scilpy/tractanalysis/bingham_metric_along_streamlines.py similarity index 79% rename from scilpy/tractanalysis/lobe_metrics_along_streamlines.py rename to scilpy/tractanalysis/bingham_metric_along_streamlines.py index 7429c377bb..f20fa27e9b 100644 --- a/scilpy/tractanalysis/lobe_metrics_along_streamlines.py +++ b/scilpy/tractanalysis/bingham_metric_along_streamlines.py @@ -1,16 +1,15 @@ # -*- coding: utf-8 -*- -from dipy.io.stateful_tractogram import StatefulTractogram import numpy as np from scilpy.reconst.bingham import bingham_to_peak_direction from scilpy.tractanalysis.grid_intersections import grid_intersections -def lobe_specific_metric_map_along_streamlines(sft, bingham_coeffs, - metric, max_theta, - length_weighting): +def bingham_metric_map_along_streamlines(sft, bingham_coeffs, + metric, max_theta, + length_weighting): """ - Compute mean map for a given lobe-specific metric along streamlines. + Compute mean map for a given Bingham metric along streamlines. Parameters ---------- @@ -20,8 +19,7 @@ def lobe_specific_metric_map_along_streamlines(sft, bingham_coeffs, Array of shape (X, Y, Z, N_LOBES, NB_PARAMS) containing the Bingham distributions parameters. metric : ndarray - Array of shape (X, Y, Z) containing the lobe-specific - metric of interest. + Array of shape (X, Y, Z) containing the Bingham metric of interest. max_theta : float Maximum angle in degrees between the fiber direction and the Bingham peak direction. @@ -30,9 +28,9 @@ def lobe_specific_metric_map_along_streamlines(sft, bingham_coeffs, """ fd_sum, weights = \ - lobe_metric_sum_along_streamlines(sft, bingham_coeffs, - metric, max_theta, - length_weighting) + bingham_metric_sum_along_streamlines(sft, bingham_coeffs, + metric, max_theta, + length_weighting) non_zeros = np.nonzero(fd_sum) weights_nz = weights[non_zeros] @@ -41,10 +39,10 @@ def lobe_specific_metric_map_along_streamlines(sft, bingham_coeffs, return fd_sum -def lobe_metric_sum_along_streamlines(sft, bingham_coeffs, metric, - max_theta, length_weighting): +def bingham_metric_sum_along_streamlines(sft, bingham_coeffs, metric, + max_theta, length_weighting): """ - Compute a sum map along a bundle for a given lobe-specific metric. + Compute a sum map along a bundle for a given Bingham metric. Parameters ---------- @@ -53,7 +51,7 @@ def lobe_metric_sum_along_streamlines(sft, bingham_coeffs, metric, bingham_coeffs : ndarray (X, Y, Z, N_LOBES, NB_PARAMS) Bingham distributions parameters volume. metric : ndarray (X, Y, Z) - The lobe-specific metric of interest. + The Bingham metric of interest. max_theta : float Maximum angle in degrees between the fiber direction and the Bingham peak direction. @@ -63,7 +61,7 @@ def lobe_metric_sum_along_streamlines(sft, bingham_coeffs, metric, Returns ------- metric_sum_map : np.array - Lobe-specific metric sum map. + Bingham metric sum map. weight_map : np.array Segment lengths. 
""" diff --git a/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py b/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py index 20ca2ba831..01141b1cc7 100755 --- a/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py +++ b/scripts/legacy/scil_compute_lobe_specific_fodf_metrics.py @@ -2,11 +2,11 @@ # -*- coding: utf-8 -*- from scilpy.io.deprecator import deprecate_script -from scripts.scil_fodf_bingham_metrics import main as new_main +from scripts.scil_bingham_metrics import main as new_main DEPRECATION_MSG = """ -This script has been renamed scil_fodf_lobe_specific_metrics.py. +This script has been renamed scil_bingham_metrics.py. Please change your existing pipelines accordingly. """ diff --git a/scripts/legacy/scil_compute_mean_fixel_lobe_metric_from_bundles.py b/scripts/legacy/scil_compute_mean_fixel_lobe_metric_from_bundles.py index a960e3fc78..7723020d47 100644 --- a/scripts/legacy/scil_compute_mean_fixel_lobe_metric_from_bundles.py +++ b/scripts/legacy/scil_compute_mean_fixel_lobe_metric_from_bundles.py @@ -2,16 +2,16 @@ # -*- coding: utf-8 -*- from scilpy.io.deprecator import deprecate_script -from scripts.scil_bundle_mean_fixel_lobe_metric import main as new_main +from scripts.scil_bundle_mean_fixel_bingham_metric import main as new_main DEPRECATION_MSG = """ -This script has been renamed scil_bundle_mean_fixel_lobe_metric.py. +This script has been renamed scil_bundle_mean_fixel_bingham_metric.py. Please change your existing pipelines accordingly. """ -@deprecate_script("scil_compute_mean_fixel_obe_metric_from_bundles.py", +@deprecate_script("scil_compute_mean_fixel_lobe_metric_from_bundles.py", DEPRECATION_MSG, '1.7.0') def main(): new_main() diff --git a/scripts/scil_fodf_bingham_metrics.py b/scripts/scil_bingham_metrics.py similarity index 94% rename from scripts/scil_fodf_bingham_metrics.py rename to scripts/scil_bingham_metrics.py index 53477523bb..62566a0c71 100755 --- a/scripts/scil_fodf_bingham_metrics.py +++ b/scripts/scil_bingham_metrics.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- """ Script to compute fODF lobe-specific metrics derived from a Bingham -distribution fitting, as described in [1]. Resulting metrics are fiber density +distribution fit, as described in [1]. Resulting metrics are fiber density (FD), fiber spread (FS) and fiber fraction (FF) [2]. The Bingham coefficients volume comes from scil_fodf_to_bingham.py. @@ -23,10 +23,8 @@ import logging from scilpy.io.image import get_data_as_mask -from scilpy.io.utils import (add_overwrite_arg, - add_processes_arg, - add_verbose_arg, - assert_inputs_exist, +from scilpy.io.utils import (add_overwrite_arg, add_processes_arg, + add_verbose_arg, assert_inputs_exist, assert_outputs_exist, validate_nbr_processes) from scilpy.reconst.bingham import (compute_fiber_density, compute_fiber_spread, diff --git a/scripts/scil_bundle_mean_fixel_lobe_metric.py b/scripts/scil_bundle_mean_fixel_bingham_metric.py similarity index 62% rename from scripts/scil_bundle_mean_fixel_lobe_metric.py rename to scripts/scil_bundle_mean_fixel_bingham_metric.py index caf13ee90f..71e768d6cd 100755 --- a/scripts/scil_bundle_mean_fixel_lobe_metric.py +++ b/scripts/scil_bundle_mean_fixel_bingham_metric.py @@ -2,23 +2,23 @@ # -*- coding: utf-8 -*- """ -Given a bundle and Bingham coefficients, compute the average lobe-specific +Given a bundle and Bingham coefficients, compute the average Bingham metric at each voxel intersected by the bundle. 
 Intersected voxels are found by computing the intersection between the voxel
 grid and each streamline in the input tractogram.
 
 This script behaves like scil_compute_mean_fixel_afd_from_bundles.py for fODFs,
-but here for Bingham distributions. These latest distributions add the unique
-possibility to capture fixel-based fiber spread (FS) and fiber fraction (FF).
-FD from the bingham should be "equivalent" to the AFD_fixel we are used to.
+but here for Bingham distributions. These add the unique possibility to capture
+fixel-based fiber spread (FS) and fiber fraction (FF). FD from the Bingham
+should be "equivalent" to the AFD_fixel we are used to.
 
 Bingham coefficients volume must come from scil_fodf_to_bingham.py
-and lobe-specific metrics comes from scil_fodf_lobe_specific_metrics.py.
+and Bingham metrics come from scil_bingham_metrics.py.
 
-Lobe-specific metrics are metrics extracted from Bingham distributions fitted
-to fODF. There are as many values per voxel as there are lobes extracted. The
-values chosen for a given voxelis the one belonging to the lobe better aligned
-with the current streamline segment.
+Bingham metrics are extracted from Bingham distributions fitted to fODF. There
+are as many values per voxel as there are lobes extracted. The value chosen
+for a given voxel is the one belonging to the lobe better aligned with the
+current streamline segment.
 
 Please use a bundle file rather than a whole tractogram.
 """
@@ -32,8 +32,8 @@
 from scilpy.io.utils import (add_overwrite_arg, add_reference_arg,
                              assert_inputs_exist, assert_outputs_exist)
-from scilpy.tractanalysis.lobe_metrics_along_streamlines \
-    import lobe_specific_metric_map_along_streamlines
+from scilpy.tractanalysis.bingham_metric_along_streamlines \
+    import bingham_metric_map_along_streamlines
 
 
 def _build_arg_parser():
@@ -43,8 +43,8 @@ def _build_arg_parser():
                    help='Path of the bundle file.')
     p.add_argument('in_bingham',
                    help='Path of the Bingham volume.')
-    p.add_argument('in_lobe_metric',
-                   help='Path of the lobe-specific metric (FD, FS, or FF) '
+    p.add_argument('in_bingham_metric',
+                   help='Path of the Bingham metric (FD, FS, or FF) '
                         'volume.')
     p.add_argument('out_mean_map',
                    help='Path of the output mean map.')
@@ -68,23 +68,23 @@ def main():
 
     assert_inputs_exist(parser, [args.in_bundle,
                                  args.in_bingham,
-                                 args.in_lobe_metric])
+                                 args.in_bingham_metric])
     assert_outputs_exist(parser, args, [args.out_mean_map])
 
     sft = load_tractogram_with_reference(parser, args, args.in_bundle)
 
     bingham_img = nib.load(args.in_bingham)
-    metric_img = nib.load(args.in_lobe_metric)
+    metric_img = nib.load(args.in_bingham_metric)
 
     if bingham_img.shape[-2] != metric_img.shape[-1]:
         parser.error('Dimension mismatch between Bingham coefficients '
-                     'and lobe-specific metric image.')
+                     'and Bingham metric image.')
 
     metric_mean_map =\
-        lobe_specific_metric_map_along_streamlines(sft,
-                                                   bingham_img.get_fdata(),
-                                                   metric_img.get_fdata(),
-                                                   args.max_theta,
-                                                   args.length_weighting)
+        bingham_metric_map_along_streamlines(sft,
+                                             bingham_img.get_fdata(),
+                                             metric_img.get_fdata(),
+                                             args.max_theta,
+                                             args.length_weighting)
 
     nib.Nifti1Image(metric_mean_map.astype(np.float32),
                     bingham_img.affine).to_filename(args.out_mean_map)
diff --git a/scripts/tests/test_fodf_bingham_metrics.py b/scripts/tests/test_bingham_metrics.py
similarity index 86%
rename from scripts/tests/test_fodf_bingham_metrics.py
rename to scripts/tests/test_bingham_metrics.py
index f7161482a7..6443a23337 100644
--- a/scripts/tests/test_fodf_bingham_metrics.py
+++ 
b/scripts/tests/test_bingham_metrics.py @@ -12,7 +12,7 @@ def test_help_option(script_runner): - ret = script_runner.run('scil_fodf_lobe_specific_metrics.py', + ret = script_runner.run('scil_bingham_metrics.py', '--help') assert ret.success @@ -22,7 +22,7 @@ def test_execution_processing(script_runner): in_bingham = os.path.join(get_home(), 'processing', 'fodf_bingham.nii.gz') - ret = script_runner.run('scil_fodf_lobe_specific_metrics.py', + ret = script_runner.run('scil_bingham_metrics.py', in_bingham, '--nbr_integration_steps', '10', '--processes', '1') @@ -36,7 +36,7 @@ def test_execution_processing_mask(script_runner): in_mask = os.path.join(get_home(), 'processing', 'seed.nii.gz') - ret = script_runner.run('scil_fodf_lobe_specific_metrics.py', + ret = script_runner.run('scil_bingham_metrics.py', in_bingham, '--nbr_integration_steps', '10', '--processes', '1', '--mask', in_mask, '-f') @@ -48,7 +48,7 @@ def test_execution_processing_not_all(script_runner): in_bingham = os.path.join(get_home(), 'processing', 'fodf_bingham.nii.gz') - ret = script_runner.run('scil_fodf_lobe_specific_metrics.py', + ret = script_runner.run('scil_bingham_metrics.py', in_bingham, '--nbr_integration_steps', '10', '--processes', '1', '--not_all', '--out_fs', 'fs.nii.gz', '-f') diff --git a/scripts/tests/test_bundle_mean_fixel_lobe_metric.py b/scripts/tests/test_bundle_mean_fixel_bingham_metric.py similarity index 88% rename from scripts/tests/test_bundle_mean_fixel_lobe_metric.py rename to scripts/tests/test_bundle_mean_fixel_bingham_metric.py index 37f812dc8d..5d81eec72e 100644 --- a/scripts/tests/test_bundle_mean_fixel_lobe_metric.py +++ b/scripts/tests/test_bundle_mean_fixel_bingham_metric.py @@ -13,7 +13,7 @@ def test_help_option(script_runner): ret = script_runner.run( - 'scil_bundle_mean_fixel_lobe_metric.py', '--help') + 'scil_bundle_mean_fixel_bingham_metric.py', '--help') assert ret.success @@ -25,7 +25,7 @@ def test_execution_processing(script_runner): in_bundles = os.path.join(get_home(), 'processing', 'tracking.trk') ret = script_runner.run( - 'scil_bundle_mean_fixel_lobe_metric.py', + 'scil_bundle_mean_fixel_bingham_metric.py', in_bundles, in_bingham, in_metric, 'fixel_mean_fd.nii.gz', '--length_weighting') From a2e3b34c2c201baadb9d94d1c5351f39ff20dda3 Mon Sep 17 00:00:00 2001 From: CHrlS98 Date: Thu, 14 Dec 2023 14:06:48 -0500 Subject: [PATCH 22/63] PEP8 --- scilpy/tests/arrays.py | 224 ++++++++++++++++++++--------------------- 1 file changed, 112 insertions(+), 112 deletions(-) diff --git a/scilpy/tests/arrays.py b/scilpy/tests/arrays.py index b40826db30..e9a196006c 100644 --- a/scilpy/tests/arrays.py +++ b/scilpy/tests/arrays.py @@ -245,131 +245,131 @@ fodf_3x3_order8_descoteaux07_filtered_cosine = np.array([[ - [[ 1.10403600e-01, -1.56274168e-02, -1.76220261e-03, -4.06611449e-02, - -1.30363567e-01, 3.75744696e-03, -9.97133892e-02, -3.49429274e-03, - 9.52583150e-02, 8.20238612e-03, -4.89889718e-03, 2.97695306e-03, - 4.70504991e-03, 2.74152194e-02, -1.06442787e-03, 3.09297306e-02, - 3.78406501e-02, 4.40385732e-03, 6.97472214e-02, -5.22356452e-03, - 5.54510683e-02, 4.19701656e-03, -4.61628754e-02, 1.04670478e-03, - -9.23817078e-02, -1.08910148e-02, 5.21920978e-04, -6.08726863e-03, - 5.44869381e-03, -2.53482527e-04, -2.27573225e-03, -1.40288007e-02, - 1.35667761e-03, -1.30331232e-02, 5.35207622e-04, -1.63039610e-02, - 7.97932344e-03, 8.02639457e-04, -1.21136261e-02, -1.83430175e-03, - -2.54608366e-02, 9.91489623e-04, -1.97747572e-02, -1.75031223e-03, - 1.49128199e-02, -1.43106403e-03, 
2.66094509e-02, 8.57021815e-04, - 3.30743534e-02]], - - [[ 1.40072235e-01, 3.36392275e-03, -1.12255448e-04, -5.78225308e-03, - -1.77192188e-01, -6.19144118e-03, -1.17119585e-01, -1.16353192e-02, - 3.66205780e-02, -3.46907611e-02, -3.57179047e-03, -9.03952453e-03, - 3.77892854e-03, 7.63422898e-03, 1.62939211e-03, 1.45448683e-02, - 9.98680216e-02, -2.60934868e-03, 1.02401140e-01, -3.99776516e-03, - 6.98372955e-02, 8.57505830e-03, -1.55541774e-02, -6.67052505e-04, - -3.87320367e-02, 2.70807684e-02, 4.61517902e-04, 1.48157281e-02, - 2.48281726e-03, 3.77428961e-03, -3.20783604e-03, -5.32528133e-03, - -4.66592653e-03, -4.11151389e-03, 1.53491240e-04, -6.28303818e-03, - -2.60040931e-02, 9.76421739e-04, -3.20749919e-02, 6.17639701e-04, - -3.80348770e-02, 3.50055436e-03, -2.89460088e-02, -6.21961773e-03, - 6.33647814e-03, -3.64798932e-03, 1.61175330e-02, -8.68729069e-03, - 1.87904410e-02]], - - [[ 1.14020750e-01, -6.87045764e-03, -1.14232450e-03, 4.11661189e-02, - -1.63423278e-01, 1.42638757e-03, -1.02391114e-01, -3.10274637e-03, - -4.14102976e-02, -7.65200269e-05, -1.22479669e-03, 1.44271152e-04, - 1.02909411e-03, -2.53524591e-02, 8.46680097e-04, -2.91525407e-02, - 8.15595775e-02, 2.62140564e-03, 8.88886643e-02, 3.47353540e-03, - 6.60543440e-02, 4.57053464e-03, 2.22276194e-02, 1.59968571e-03, - 4.74753501e-02, 9.57052289e-03, -2.72297788e-03, 7.60147678e-04, - -2.24030127e-03, -2.25223735e-03, -1.58726069e-03, 1.44355725e-02, - -2.65302865e-03, 1.43794907e-02, -7.26650570e-04, 1.72575124e-02, - -2.26407370e-02, -3.96339977e-05, -2.79391904e-02, 1.96495129e-04, - -3.42417083e-02, 5.69135891e-04, -2.83188289e-02, -3.90851596e-03, - -9.86179763e-03, -1.53911048e-03, -1.44494506e-02, -3.56561464e-03, - -2.28537982e-02]]], - - [[[ 7.26406236e-02, 7.02981930e-03, -1.87881430e-03, -1.61256813e-02, - -2.64719814e-02, 1.79623468e-02, -2.81063801e-02, 6.54603124e-03, + [[1.10403600e-01, -1.56274168e-02, -1.76220261e-03, -4.06611449e-02, + -1.30363567e-01, 3.75744696e-03, -9.97133892e-02, -3.49429274e-03, + 9.52583150e-02, 8.20238612e-03, -4.89889718e-03, 2.97695306e-03, + 4.70504991e-03, 2.74152194e-02, -1.06442787e-03, 3.09297306e-02, + 3.78406501e-02, 4.40385732e-03, 6.97472214e-02, -5.22356452e-03, + 5.54510683e-02, 4.19701656e-03, -4.61628754e-02, 1.04670478e-03, + -9.23817078e-02, -1.08910148e-02, 5.21920978e-04, -6.08726863e-03, + 5.44869381e-03, -2.53482527e-04, -2.27573225e-03, -1.40288007e-02, + 1.35667761e-03, -1.30331232e-02, 5.35207622e-04, -1.63039610e-02, + 7.97932344e-03, 8.02639457e-04, -1.21136261e-02, -1.83430175e-03, + -2.54608366e-02, 9.91489623e-04, -1.97747572e-02, -1.75031223e-03, + 1.49128199e-02, -1.43106403e-03, 2.66094509e-02, 8.57021815e-04, + 3.30743534e-02]], + + [[1.40072235e-01, 3.36392275e-03, -1.12255448e-04, -5.78225308e-03, + -1.77192188e-01, -6.19144118e-03, -1.17119585e-01, -1.16353192e-02, + 3.66205780e-02, -3.46907611e-02, -3.57179047e-03, -9.03952453e-03, + 3.77892854e-03, 7.63422898e-03, 1.62939211e-03, 1.45448683e-02, + 9.98680216e-02, -2.60934868e-03, 1.02401140e-01, -3.99776516e-03, + 6.98372955e-02, 8.57505830e-03, -1.55541774e-02, -6.67052505e-04, + -3.87320367e-02, 2.70807684e-02, 4.61517902e-04, 1.48157281e-02, + 2.48281726e-03, 3.77428961e-03, -3.20783604e-03, -5.32528133e-03, + -4.66592653e-03, -4.11151389e-03, 1.53491240e-04, -6.28303818e-03, + -2.60040931e-02, 9.76421739e-04, -3.20749919e-02, 6.17639701e-04, + -3.80348770e-02, 3.50055436e-03, -2.89460088e-02, -6.21961773e-03, + 6.33647814e-03, -3.64798932e-03, 1.61175330e-02, -8.68729069e-03, + 
1.87904410e-02]], + + [[1.14020750e-01, -6.87045764e-03, -1.14232450e-03, 4.11661189e-02, + -1.63423278e-01, 1.42638757e-03, -1.02391114e-01, -3.10274637e-03, + -4.14102976e-02, -7.65200269e-05, -1.22479669e-03, 1.44271152e-04, + 1.02909411e-03, -2.53524591e-02, 8.46680097e-04, -2.91525407e-02, + 8.15595775e-02, 2.62140564e-03, 8.88886643e-02, 3.47353540e-03, + 6.60543440e-02, 4.57053464e-03, 2.22276194e-02, 1.59968571e-03, + 4.74753501e-02, 9.57052289e-03, -2.72297788e-03, 7.60147678e-04, + -2.24030127e-03, -2.25223735e-03, -1.58726069e-03, 1.44355725e-02, + -2.65302865e-03, 1.43794907e-02, -7.26650570e-04, 1.72575124e-02, + -2.26407370e-02, -3.96339977e-05, -2.79391904e-02, 1.96495129e-04, + -3.42417083e-02, 5.69135891e-04, -2.83188289e-02, -3.90851596e-03, + -9.86179763e-03, -1.53911048e-03, -1.44494506e-02, -3.56561464e-03, + -2.28537982e-02]]], + + [[[7.26406236e-02, 7.02981930e-03, -1.87881430e-03, -1.61256813e-02, + -2.64719814e-02, 1.79623468e-02, -2.81063801e-02, 6.54603124e-03, 1.45408540e-02, -1.01358115e-02, 4.89154209e-04, -2.90726497e-03, - -5.52737463e-04, 8.21197934e-03, -1.38576779e-03, 1.91516929e-02, + -5.52737463e-04, 8.21197934e-03, -1.38576779e-03, 1.91516929e-02, 1.90580115e-02, 1.39116265e-03, 3.21379718e-02, -3.41384306e-02, 1.56080311e-02, 2.17425083e-03, -1.61882394e-02, 2.30982768e-03, - -4.68867105e-03, -6.71668742e-04, -1.23223614e-04, 2.34797057e-03, + -4.68867105e-03, -6.71668742e-04, -1.23223614e-04, 2.34797057e-03, 1.21306862e-03, 1.70700484e-03, 2.32198084e-03, -5.41906023e-03, 4.33958103e-03, -8.72831333e-03, 4.65559442e-04, -1.77434743e-02, - -3.67747484e-03, 2.22363618e-03, -6.92884020e-03, 1.54291482e-03, - -1.17597007e-02, -7.95446846e-04, -9.29108890e-05, 6.00740874e-03, - -8.13945278e-04, 6.46410176e-04, 3.25235814e-03, 5.35093467e-03, + -3.67747484e-03, 2.22363618e-03, -6.92884020e-03, 1.54291482e-03, + -1.17597007e-02, -7.95446846e-04, -9.29108890e-05, 6.00740874e-03, + -8.13945278e-04, 6.46410176e-04, 3.25235814e-03, 5.35093467e-03, 1.94505156e-03]], - [[ 9.81325940e-02, 1.52165204e-02, 1.24419995e-03, -1.28939516e-03, - -7.51292526e-02, -4.42181788e-03, -6.18455506e-02, 1.09682359e-02, - 1.47464674e-03, -3.31177366e-02, -2.92832122e-03, -1.02011317e-02, - 3.94651358e-04, 2.74508980e-03, 3.55769102e-03, 4.53198680e-03, - 1.65537934e-02, 6.31128591e-04, 3.60410320e-02, -2.02268925e-02, - 3.71610204e-02, 1.50104427e-02, -2.10404071e-02, -6.66322009e-03, - -2.82880695e-02, 2.37099397e-02, 4.67322046e-04, 1.69550184e-02, - 7.12799550e-04, 5.70549692e-03, 8.82880626e-04, -3.97702624e-03, - -4.87365758e-03, 1.15246875e-03, -2.71843590e-03, 1.70523580e-03, - 8.22004027e-03, -2.16495266e-03, -1.35291214e-02, 1.12046159e-02, - -1.70833051e-02, 9.56991767e-03, -2.20789814e-02, -5.21140434e-03, - -5.63453210e-03, 9.14118089e-03, 1.07848139e-02, -2.44076209e-03, - 7.20372163e-03]], - - [[ 7.82262155e-02, 4.89325589e-03, 9.69035921e-04, 2.48817773e-02, - -5.77909247e-02, 5.15396685e-03, -3.73859556e-02, -1.12652597e-02, - 1.46133523e-03, -1.93929247e-02, -1.66098338e-03, -5.40552612e-03, - 1.90785108e-03, -1.27736588e-02, -2.63140561e-04, -1.64589849e-02, - -1.67926849e-02, 1.37226384e-02, 3.40286811e-02, 1.60456988e-02, - 3.66665622e-02, -4.95865979e-03, -8.81294607e-04, -9.27511404e-04, - -5.56159406e-03, 1.85807717e-02, 4.28397297e-05, 1.14440177e-02, - -8.44202079e-04, 2.83058074e-03, -2.77968111e-03, 7.24897169e-03, - -3.92705015e-03, 8.88560041e-03, -1.14740758e-03, 1.25959122e-02, - 2.28839936e-02, -6.30383042e-04, 6.79235888e-03, 2.74534443e-03, - 
-1.08549693e-02, 2.89646379e-03, -1.59780244e-02, -7.49181391e-03, - -9.15354118e-03, -1.60175812e-03, -1.81015916e-03, -7.74073756e-04, - 5.32445729e-03]]], - - [[[ 1.34755663e-02, 1.28413970e-02, 1.49938841e-03, -6.47475638e-03, + [[9.81325940e-02, 1.52165204e-02, 1.24419995e-03, -1.28939516e-03, + -7.51292526e-02, -4.42181788e-03, -6.18455506e-02, 1.09682359e-02, + 1.47464674e-03, -3.31177366e-02, -2.92832122e-03, -1.02011317e-02, + 3.94651358e-04, 2.74508980e-03, 3.55769102e-03, 4.53198680e-03, + 1.65537934e-02, 6.31128591e-04, 3.60410320e-02, -2.02268925e-02, + 3.71610204e-02, 1.50104427e-02, -2.10404071e-02, -6.66322009e-03, + -2.82880695e-02, 2.37099397e-02, 4.67322046e-04, 1.69550184e-02, + 7.12799550e-04, 5.70549692e-03, 8.82880626e-04, -3.97702624e-03, + -4.87365758e-03, 1.15246875e-03, -2.71843590e-03, 1.70523580e-03, + 8.22004027e-03, -2.16495266e-03, -1.35291214e-02, 1.12046159e-02, + -1.70833051e-02, 9.56991767e-03, -2.20789814e-02, -5.21140434e-03, + -5.63453210e-03, 9.14118089e-03, 1.07848139e-02, -2.44076209e-03, + 7.20372163e-03]], + + [[7.82262155e-02, 4.89325589e-03, 9.69035921e-04, 2.48817773e-02, + -5.77909247e-02, 5.15396685e-03, -3.73859556e-02, -1.12652597e-02, + 1.46133523e-03, -1.93929247e-02, -1.66098338e-03, -5.40552612e-03, + 1.90785108e-03, -1.27736588e-02, -2.63140561e-04, -1.64589849e-02, + -1.67926849e-02, 1.37226384e-02, 3.40286811e-02, 1.60456988e-02, + 3.66665622e-02, -4.95865979e-03, -8.81294607e-04, -9.27511404e-04, + -5.56159406e-03, 1.85807717e-02, 4.28397297e-05, 1.14440177e-02, + -8.44202079e-04, 2.83058074e-03, -2.77968111e-03, 7.24897169e-03, + -3.92705015e-03, 8.88560041e-03, -1.14740758e-03, 1.25959122e-02, + 2.28839936e-02, -6.30383042e-04, 6.79235888e-03, 2.74534443e-03, + -1.08549693e-02, 2.89646379e-03, -1.59780244e-02, -7.49181391e-03, + -9.15354118e-03, -1.60175812e-03, -1.81015916e-03, -7.74073756e-04, + 5.32445729e-03]]], + + [[[1.34755663e-02, 1.28413970e-02, 1.49938841e-03, -6.47475638e-03, 2.52029988e-03, 4.88968824e-03, -5.42289754e-03, 1.60941684e-03, - -2.21852068e-03, 2.07330837e-03, 4.86536226e-03, 1.66978774e-04, - -6.71214109e-03, 1.27351141e-03, 9.82399935e-04, 2.22677827e-03, + -2.21852068e-03, 2.07330837e-03, 4.86536226e-03, 1.66978774e-04, + -6.71214109e-03, 1.27351141e-03, 9.82399935e-04, 2.22677827e-03, 2.71719965e-03, 2.06634356e-03, 2.94252961e-03, -8.67336125e-03, 3.55625935e-04, 1.36775123e-03, -2.08127408e-03, 2.52009045e-04, - -1.49716310e-04, -3.07585778e-04, 1.29094098e-04, -1.78327891e-04, - -3.55467543e-03, -1.03702801e-03, 5.01705916e-03, -1.46068291e-03, + -1.49716310e-04, -3.07585778e-04, 1.29094098e-04, -1.78327891e-04, + -3.55467543e-03, -1.03702801e-03, 5.01705916e-03, -1.46068291e-03, 2.29931595e-03, -2.32968172e-03, 7.43670432e-04, -6.92986861e-04, - -1.00498143e-03, 2.95173070e-05, -1.53450925e-03, 3.21191049e-04, - -1.58863466e-03, 2.65016963e-03, -4.11708281e-04, -7.87924032e-04, - -8.33831423e-04, 1.07967881e-03, 2.63612367e-04, 9.87115809e-04, + -1.00498143e-03, 2.95173070e-05, -1.53450925e-03, 3.21191049e-04, + -1.58863466e-03, 2.65016963e-03, -4.11708281e-04, -7.87924032e-04, + -8.33831423e-04, 1.07967881e-03, 2.63612367e-04, 9.87115809e-04, 1.21380213e-03]], - [[ 1.61478356e-02, 1.43728777e-02, 1.93974718e-03, -3.32623522e-03, - -1.17420191e-03, 3.21700718e-03, -8.75686341e-03, 1.37117517e-03, - -4.32829335e-03, -4.42773363e-03, 2.38329302e-03, -3.33902113e-03, - -4.12497515e-03, -6.89667443e-05, 2.41897821e-03, -4.26942050e-03, - -1.74191884e-03, 1.81570155e-03, 3.17867090e-03, 
-5.10889033e-03, - 4.49678511e-03, -2.60849304e-04, 1.84832760e-05, 4.61673598e-04, - -1.52379129e-03, 9.91300908e-04, 9.15433033e-04, 2.68803221e-03, - -2.02188630e-03, 2.73531219e-03, 3.98791591e-03, -1.49808319e-03, - 3.31828949e-04, 6.51188325e-04, -1.63160178e-03, 2.33668240e-03, - 2.93338642e-03, -5.06191672e-05, -1.03964494e-04, 3.87231978e-04, - -8.64134554e-04, 2.56989460e-03, -2.72089308e-03, -5.28755828e-04, - -2.52279382e-03, 1.50003583e-03, 2.73936763e-04, -7.08454308e-04, - 1.54615336e-03]], - - [[ 1.21073977e-02, 9.65647748e-03, 1.40513870e-03, 5.96978239e-03, - -5.83388001e-03, 1.18491063e-03, -7.42142705e-03, -1.61753221e-04, - 2.31758132e-03, -1.01991745e-02, 1.25833161e-03, -3.28820299e-03, - 9.77997070e-04, -3.33627171e-03, -7.64652162e-04, -4.00396819e-03, - -4.28747212e-03, 2.06971554e-03, 4.22446175e-03, 1.39004952e-03, - 5.11484010e-03, -2.51029606e-04, -1.53415978e-03, -9.05476137e-04, - -3.72103599e-03, 1.62807768e-03, 2.07196281e-03, 5.50544884e-03, - 3.46282883e-04, 3.54274790e-03, -1.15406623e-03, 1.05391112e-03, - -1.00296491e-03, 9.83314237e-04, -5.52227484e-04, -2.60564565e-04, - 3.63180002e-03, 1.15833727e-03, 2.31403574e-03, 6.13974106e-04, - -7.09616975e-04, -1.19250538e-04, -3.14249677e-03, -2.51516937e-04, - -7.67022089e-04, -3.86918453e-04, 7.29633770e-04, -1.56990164e-04, - 9.27285029e-04]]]]) + [[1.61478356e-02, 1.43728777e-02, 1.93974718e-03, -3.32623522e-03, + -1.17420191e-03, 3.21700718e-03, -8.75686341e-03, 1.37117517e-03, + -4.32829335e-03, -4.42773363e-03, 2.38329302e-03, -3.33902113e-03, + -4.12497515e-03, -6.89667443e-05, 2.41897821e-03, -4.26942050e-03, + -1.74191884e-03, 1.81570155e-03, 3.17867090e-03, -5.10889033e-03, + 4.49678511e-03, -2.60849304e-04, 1.84832760e-05, 4.61673598e-04, + -1.52379129e-03, 9.91300908e-04, 9.15433033e-04, 2.68803221e-03, + -2.02188630e-03, 2.73531219e-03, 3.98791591e-03, -1.49808319e-03, + 3.31828949e-04, 6.51188325e-04, -1.63160178e-03, 2.33668240e-03, + 2.93338642e-03, -5.06191672e-05, -1.03964494e-04, 3.87231978e-04, + -8.64134554e-04, 2.56989460e-03, -2.72089308e-03, -5.28755828e-04, + -2.52279382e-03, 1.50003583e-03, 2.73936763e-04, -7.08454308e-04, + 1.54615336e-03]], + + [[1.21073977e-02, 9.65647748e-03, 1.40513870e-03, 5.96978239e-03, + -5.83388001e-03, 1.18491063e-03, -7.42142705e-03, -1.61753221e-04, + 2.31758132e-03, -1.01991745e-02, 1.25833161e-03, -3.28820299e-03, + 9.77997070e-04, -3.33627171e-03, -7.64652162e-04, -4.00396819e-03, + -4.28747212e-03, 2.06971554e-03, 4.22446175e-03, 1.39004952e-03, + 5.11484010e-03, -2.51029606e-04, -1.53415978e-03, -9.05476137e-04, + -3.72103599e-03, 1.62807768e-03, 2.07196281e-03, 5.50544884e-03, + 3.46282883e-04, 3.54274790e-03, -1.15406623e-03, 1.05391112e-03, + -1.00296491e-03, 9.83314237e-04, -5.52227484e-04, -2.60564565e-04, + 3.63180002e-03, 1.15833727e-03, 2.31403574e-03, 6.13974106e-04, + -7.09616975e-04, -1.19250538e-04, -3.14249677e-03, -2.51516937e-04, + -7.67022089e-04, -3.86918453e-04, 7.29633770e-04, -1.56990164e-04, + 9.27285029e-04]]]]) # 3D array with slices 2-7 with values from 1-6 (with borders of 0) ref_in_labels = np.zeros((10, 10, 10), dtype=np.uint16) From f6e0ea16c5cb2d1ae5b6dcbaddf72c198bd23f28 Mon Sep 17 00:00:00 2001 From: frheault Date: Thu, 14 Dec 2023 14:10:10 -0500 Subject: [PATCH 23/63] Addressed Antoine comments --- scripts/scil_json_harmonize_entries.py | 2 +- scripts/scil_json_merge_entries.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/scil_json_harmonize_entries.py 
b/scripts/scil_json_harmonize_entries.py
index a735089857..c246d3beb7 100755
--- a/scripts/scil_json_harmonize_entries.py
+++ b/scripts/scil_json_harmonize_entries.py
@@ -4,7 +4,7 @@
 """ This script will harmonize a json file by adding missing keys and values
 that differs between the different layers of the dictionary.
 
-This is use only (for now) in Aggregate_All_* portion of tractometry-flow,
+This is used only (for now) in the Aggregate_All_* portion of tractometry-flow,
 to counter the problem of missing bundles/metrics/lesions between subjects.
 
 The most common use case is when specific subjects have missing bundles
diff --git a/scripts/scil_json_merge_entries.py b/scripts/scil_json_merge_entries.py
index 7f7e624f57..f11eda5758 100755
--- a/scripts/scil_json_merge_entries.py
+++ b/scripts/scil_json_merge_entries.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-""" Merge multiple json file into a single one.
+""" Merge multiple json files into a single one.
 Typically used during the tractometry pipeline.
 
 Without option it will simply merge all entries at the top level, the top
From d224c7b9425a6abe03309dd8a8f48719f688214f Mon Sep 17 00:00:00 2001
From: AntoineTheb
Date: Thu, 14 Dec 2023 14:14:29 -0500
Subject: [PATCH 24/63] ENH: proper legacy handling

---
 scripts/legacy/scil_convert_gradients_fsl_to_mrtrix.py | 2 +-
 scripts/legacy/scil_convert_gradients_mrtrix_to_fsl.py | 2 +-
 scripts/legacy/tests/test_legacy_scripts.py            | 2 ++
 3 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/scripts/legacy/scil_convert_gradients_fsl_to_mrtrix.py b/scripts/legacy/scil_convert_gradients_fsl_to_mrtrix.py
index 33592e88fd..4149bfa0a8 100755
--- a/scripts/legacy/scil_convert_gradients_fsl_to_mrtrix.py
+++ b/scripts/legacy/scil_convert_gradients_fsl_to_mrtrix.py
@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 
 from scilpy.io.deprecator import deprecate_script
-from scripts.scil_gradients_convert_fsl_to_mrtrix import main as new_main
+from scripts.scil_gradients_convert import main as new_main
 
 
 DEPRECATION_MSG = """
diff --git a/scripts/legacy/scil_convert_gradients_mrtrix_to_fsl.py b/scripts/legacy/scil_convert_gradients_mrtrix_to_fsl.py
index 0318cbd848..acc1ed6a78 100755
--- a/scripts/legacy/scil_convert_gradients_mrtrix_to_fsl.py
+++ b/scripts/legacy/scil_convert_gradients_mrtrix_to_fsl.py
@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 
 from scilpy.io.deprecator import deprecate_script
-from scripts.scil_gradients_convert_mrtrix_to_fsl import main as new_main
+from scripts.scil_gradients_convert import main as new_main
 
 
 DEPRECATION_MSG = """
diff --git a/scripts/legacy/tests/test_legacy_scripts.py b/scripts/legacy/tests/test_legacy_scripts.py
index de2d79444b..52d08518cf 100644
--- a/scripts/legacy/tests/test_legacy_scripts.py
+++ b/scripts/legacy/tests/test_legacy_scripts.py
@@ -68,6 +68,8 @@
     "scil_compute_todi.py",
     "scil_concatenate_dwi.py",
     "scil_connectivity_math.py",
+    "scil_convert_gradients_fsl_to_mrtrix.py",
+    "scil_convert_gradients_mrtrix_to_fsl.py",
     "scil_convert_fdf.py",
     "scil_convert_json_to_xlsx.py",
     "scil_convert_rgb.py",
From 470f8fda1ed66a1f0d2a3dd0ef728a06b68464c0 Mon Sep 17 00:00:00 2001
From: AntoineTheb
Date: Thu, 14 Dec 2023 14:17:33 -0500
Subject: [PATCH 25/63] ENH: cleanup

---
 scripts/scil_gradients_convert.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/scripts/scil_gradients_convert.py b/scripts/scil_gradients_convert.py
index 722267f105..90b5b06300 100755
--- a/scripts/scil_gradients_convert.py
+++ b/scripts/scil_gradients_convert.py
@@ 
-52,16 +52,17 @@ def main(): assert_gradients_filenames_valid(parser, args.gradients, input_is_fsl) assert_inputs_exist(parser, args.gradients) - if not input_is_fsl: - output = [args.output + '.bval', args.output + '.bvec'] - assert_outputs_exist(parser, args, output[0], output[1]) - mrtrix_b = args.gradients[0] - mrtrix2fsl(mrtrix_b, args.output) - else: + if input_is_fsl: output = args.output + '.b' assert_outputs_exist(parser, args, output) fsl_bval, fsl_bvec = args.gradients fsl2mrtrix(fsl_bval, fsl_bvec, args.output) + else: + output = [args.output + '.bval', args.output + '.bvec'] + assert_outputs_exist(parser, args, output[0]) + assert_outputs_exist(parser, args, output[1]) + mrtrix_b = args.gradients[0] + mrtrix2fsl(mrtrix_b, args.output) if __name__ == "__main__": From 96c5c1d2db55de580e249d8f1a6169c0ef36a8eb Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 14 Dec 2023 14:20:03 -0500 Subject: [PATCH 26/63] 3rd round of helps + pep8 --- scripts/scil_lesions_info.py | 2 ++ scripts/scil_plot_stats_per_point.py | 2 ++ scripts/scil_stats_group_comparison.py | 2 ++ scripts/scil_tractogram_compute_TODI.py | 2 ++ scripts/scil_tractogram_compute_density_map.py | 5 ++++- 5 files changed, 12 insertions(+), 1 deletion(-) diff --git a/scripts/scil_lesions_info.py b/scripts/scil_lesions_info.py index 3445574169..92c95e8c07 100755 --- a/scripts/scil_lesions_info.py +++ b/scripts/scil_lesions_info.py @@ -9,6 +9,8 @@ To be considered a valid lesion, the lesion volume must be at least min_lesion_vol mm3. This avoid the detection of thousand of single voxel lesions if an automatic lesion segmentation tool is used. + +Formerly: scil_analyse_lesions_load.py """ import argparse diff --git a/scripts/scil_plot_stats_per_point.py b/scripts/scil_plot_stats_per_point.py index fba786ec66..8cf2b1611e 100755 --- a/scripts/scil_plot_stats_per_point.py +++ b/scripts/scil_plot_stats_per_point.py @@ -6,6 +6,8 @@ tractometry-flow. WARNING: For population, the displayed STDs is only showing the variation of the means. It does not account intra-subject STDs. + +Formerly: scil_plot_mean_std_per_point.py """ import argparse diff --git a/scripts/scil_stats_group_comparison.py b/scripts/scil_stats_group_comparison.py index fc23daacab..79ca369006 100755 --- a/scripts/scil_stats_group_comparison.py +++ b/scripts/scil_stats_group_comparison.py @@ -32,6 +32,8 @@ 2, test the group difference two by two. 6) Generate the result for all metrics and bundles + +Formerly: scil_group_comparison.py """ import argparse diff --git a/scripts/scil_tractogram_compute_TODI.py b/scripts/scil_tractogram_compute_TODI.py index 8bf6a9f5e6..de921155c7 100755 --- a/scripts/scil_tractogram_compute_TODI.py +++ b/scripts/scil_tractogram_compute_TODI.py @@ -6,6 +6,8 @@ (to support compressed streamlines). This script can afterwards output a Track Density Image (TDI) or a TODI with SF or SH representation, based on streamlines' segments. + +Formerly: scil_compute_todi.py """ import argparse diff --git a/scripts/scil_tractogram_compute_density_map.py b/scripts/scil_tractogram_compute_density_map.py index 15856ea340..1210056334 100755 --- a/scripts/scil_tractogram_compute_density_map.py +++ b/scripts/scil_tractogram_compute_density_map.py @@ -7,6 +7,8 @@ A specific value can be assigned instead of using the tract count. This script correctly handles compressed streamlines. 
+ +Formerly: scil_compute_streamlines_density_map.py """ import argparse @@ -66,7 +68,8 @@ def main(): dtype_to_use = np.uint8 streamline_count[streamline_count > 0] = args.binary - img = nib.Nifti1Image(streamline_count.astype(dtype_to_use), transformation) + img = nib.Nifti1Image(streamline_count.astype(dtype_to_use), + transformation) nib.save(img, args.out_img) From 6991fc48006ee578c7587160bee6b8864e1d7122 Mon Sep 17 00:00:00 2001 From: grahamlittlephd Date: Thu, 14 Dec 2023 14:20:19 -0500 Subject: [PATCH 27/63] surface convert doc --- scripts/scil_surface_convert.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/scripts/scil_surface_convert.py b/scripts/scil_surface_convert.py index 625f4edc28..0895ff0839 100755 --- a/scripts/scil_surface_convert.py +++ b/scripts/scil_surface_convert.py @@ -2,8 +2,11 @@ # -*- coding: utf-8 -*- """ -Script to convert a surface (FreeSurfer or VTK supported). +Script to convert surface formats + +Supported formats: ".vtk", ".vtp", ".ply", ".stl", ".xml", ".obj" + and FreeSurfer surfaces > scil_surface_convert.py surf.vtk converted_surf.ply """ @@ -36,7 +39,7 @@ def _build_arg_parser(): help='Input a surface (FreeSurfer or supported by VTK).') p.add_argument('out_surface', - help='Output flipped surface (formats supported by VTK).') + help='Output surface (formats supported by VTK).') p.add_argument('--xform', help='Path of the copy-paste output from mri_info \n' From 15e634a250b4cc03192bd7ad41f29ca5f8d9196f Mon Sep 17 00:00:00 2001 From: grahamlittlephd Date: Thu, 14 Dec 2023 14:32:40 -0500 Subject: [PATCH 28/63] surface_flip doc update --- scripts/scil_surface_flip.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/scripts/scil_surface_flip.py b/scripts/scil_surface_flip.py index 793929e282..2878f1b6c3 100755 --- a/scripts/scil_surface_flip.py +++ b/scripts/scil_surface_flip.py @@ -2,15 +2,10 @@ # -*- coding: utf-8 -*- """ -Script to flip and reverse a surface (FreeSurfer or VTK supported). -Can be used to flip in chosen axes (x, y or z), -it can also flip inside out the surface orientation (normal). +Script to flip a given surface (FreeSurfer or VTK supported). - -Best usage for FreeSurfer to LPS vtk (for MI-Brain): -!!! important FreeSurfer surfaces must be in their respective folder !!! -> mris_convert --to-scanner lh.white lh.white.vtk -> scil_surface_flip.py lh.white.vtk lh_white_lps.vtk x y +Can flip vertices coordinates around a chosen (or multiple) axes (x, y or z) +as well as reverse the orientation of the surface normals. """ import argparse @@ -41,8 +36,9 @@ def _build_arg_parser(): p.add_argument('axes', choices=['x', 'y', 'z', 'n'], nargs='+', - help='The axes (or normal orientation) you want to flip.' - ' eg: to flip the x and y axes use: x y.') + help='The axes you want to flip.' + ' eg: to flip the x and y axes use: x y.' 
+ ' to reverse the surface normals use: n') add_overwrite_arg(p) return p From a24bce69ab5ddcca84c31d30d8e41a5b72168a66 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Thu, 14 Dec 2023 14:39:09 -0500 Subject: [PATCH 29/63] fix antoine's comment --- scripts/scil_bundle_pairwise_comparison.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/scil_bundle_pairwise_comparison.py b/scripts/scil_bundle_pairwise_comparison.py index 31ae4a52d0..bf57a3f455 100755 --- a/scripts/scil_bundle_pairwise_comparison.py +++ b/scripts/scil_bundle_pairwise_comparison.py @@ -95,7 +95,7 @@ def load_data_tmp_saving(args): init_only = args[2] disable_centroids = args[3] - # Since data is often re-use when comparing multiple bundles, anything + # Since data is often re-used when comparing multiple bundles, anything # that can be computed once is saved temporarily and simply loaded on # demand hash_tmp = hashlib.md5(filename.encode()).hexdigest() From 70fac398d77a29cc55d228e2c01980030bf1bcf5 Mon Sep 17 00:00:00 2001 From: grahamlittlephd Date: Thu, 14 Dec 2023 14:40:26 -0500 Subject: [PATCH 30/63] surface_smooth doc string --- scripts/scil_surface_smooth.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/scripts/scil_surface_smooth.py b/scripts/scil_surface_smooth.py index 783e8f3604..03116f0d3b 100755 --- a/scripts/scil_surface_smooth.py +++ b/scripts/scil_surface_smooth.py @@ -4,11 +4,13 @@ """ Script to smooth a surface with a Laplacian blur. -step_size from 0.1 to 10 is recommended -Smoothing_time = step_size * nb_steps - [1, 10] for a small smoothing - [10, 100] for a moderate smoothing - [100, 1000] for a big smoothing +For a standard FreeSurfer white matter mesh a step_size from 0.1 to 10 +is recommended + +Smoothing time = step_size * nb_steps + small amount of smoothing [step_size 1, nb_steps 10] + moderate amount of smoothing [step_size 10, nb_steps 100] + large amount of smoothing [step_size 100, nb_steps 1000] """ import argparse @@ -41,7 +43,7 @@ def _build_arg_parser(): help='Output smoothed surface (.vtk).') p.add_argument('-m', '--vts_mask', - help='Vertices mask, where to apply the flow (.npy).') + help='Vertex mask, no smoothing where mask equals 0 (.npy).') p.add_argument('-n', '--nb_steps', type=int, default=2, help='Number of steps for laplacian smooth [%(default)s].') @@ -63,10 +65,10 @@ def main(): # Check smoothing parameters if args.nb_steps < 1: - parser.error("Number of steps should be strictly positive") + parser.error("Number of steps should be positive") if args.step_size <= 0.0: - parser.error("Step size should be strictly positive") + parser.error("Step size should be positive") if args.verbose: logging.getLogger().setLevel(logging.DEBUG) From bab3ace60f8a0d7c5c90a9e3dc20f775a248d7c7 Mon Sep 17 00:00:00 2001 From: grahamlittlephd Date: Thu, 14 Dec 2023 14:55:30 -0500 Subject: [PATCH 31/63] surface_apply_transform doc --- scripts/scil_surface_apply_transform.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/scripts/scil_surface_apply_transform.py b/scripts/scil_surface_apply_transform.py index 6a635d4ac6..dbb7f50a5a 100755 --- a/scripts/scil_surface_apply_transform.py +++ b/scripts/scil_surface_apply_transform.py @@ -2,18 +2,19 @@ # -*- coding: utf-8 -*- """ -Script to load and transform a surface (FreeSurfer or VTK supported), -This script is using ANTs transform (affine.txt, warp.nii.gz). 
+Script to apply a transform to a surface (FreeSurfer or VTK supported), +using output from ANTs registration tools (i.e. affine.txt, warp.nii.gz). -Best usage with ANTs from T1 to b0: +Example usage from T1 to b0 using ANTs transforms: > ConvertTransformFile 3 output0GenericAffine.mat vtk_transfo.txt --hm > scil_surface_apply_transform.py lh_white_lps.vtk affine.txt lh_white_b0.vtk\\ --ants_warp warp.nii.gz -The input surface needs to be in *T1 world LPS* coordinates +Important: The input surface needs to be in *T1 world LPS* coordinates (aligned over the T1 in MI-Brain). -The script will use the linear affine first and then the warp image from ANTs. -The resulting surface should be aligned *b0 world LPS* coordinates + +The script will use the linear affine first and then the warp image. +The resulting surface will be in *b0 world LPS* coordinates (aligned over the b0 in MI-Brain). """ @@ -50,7 +51,7 @@ def _build_arg_parser(): help='Output surface (.vtk).') p.add_argument('--ants_warp', - help='Warp image from ANTs (NIfTI format).') + help='Warp image from ANTs (Nifti image).') add_overwrite_arg(p) return p From 7ed1939817201beb01251ea46dbed9dd9e87151c Mon Sep 17 00:00:00 2001 From: grahamlittlephd Date: Thu, 14 Dec 2023 14:58:32 -0500 Subject: [PATCH 32/63] surface_flip doc --- scripts/scil_surface_flip.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/scil_surface_flip.py b/scripts/scil_surface_flip.py index 2878f1b6c3..3d5c24aa01 100755 --- a/scripts/scil_surface_flip.py +++ b/scripts/scil_surface_flip.py @@ -4,8 +4,8 @@ """ Script to flip a given surface (FreeSurfer or VTK supported). -Can flip vertices coordinates around a chosen (or multiple) axes (x, y or z) -as well as reverse the orientation of the surface normals. +Can flip vertice coordinates around a single or multiple axes +Can also be used to reverse the orientation of the surface normals. """ import argparse From 1ffa67b6655ad016d3455c6e788fb4296400dd60 Mon Sep 17 00:00:00 2001 From: grahamlittlephd Date: Thu, 14 Dec 2023 14:59:31 -0500 Subject: [PATCH 33/63] surface_flip doc --- scripts/scil_surface_flip.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/scil_surface_flip.py b/scripts/scil_surface_flip.py index 3d5c24aa01..6f63e4c7df 100755 --- a/scripts/scil_surface_flip.py +++ b/scripts/scil_surface_flip.py @@ -4,7 +4,7 @@ """ Script to flip a given surface (FreeSurfer or VTK supported). -Can flip vertice coordinates around a single or multiple axes +Can flip surface coordinates around a single or multiple axes Can also be used to reverse the orientation of the surface normals. 
""" From 3f8671fa536bb832a0a4b62acd3ee0d37a4d37ad Mon Sep 17 00:00:00 2001 From: grahamlittlephd Date: Fri, 15 Dec 2023 09:40:08 -0500 Subject: [PATCH 34/63] long line --- .python-version | 2 +- scripts/scil_surface_smooth.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.python-version b/.python-version index 7c7a975f4c..c8cfe39591 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.10 \ No newline at end of file +3.10 diff --git a/scripts/scil_surface_smooth.py b/scripts/scil_surface_smooth.py index 03116f0d3b..a4b151cf72 100755 --- a/scripts/scil_surface_smooth.py +++ b/scripts/scil_surface_smooth.py @@ -43,7 +43,7 @@ def _build_arg_parser(): help='Output smoothed surface (.vtk).') p.add_argument('-m', '--vts_mask', - help='Vertex mask, no smoothing where mask equals 0 (.npy).') + help='Vertex mask no smoothing where mask equals 0 (.npy).') p.add_argument('-n', '--nb_steps', type=int, default=2, help='Number of steps for laplacian smooth [%(default)s].') From c5b5b43fe08fd0a445d9b8b4664fdf0a380fbfe8 Mon Sep 17 00:00:00 2001 From: ThoumyreStanislas Date: Fri, 15 Dec 2023 11:14:24 -0500 Subject: [PATCH 35/63] Add_version_scilpy --- scilpy/io/utils.py | 12 +++++------- scripts/scil_search_keywords.py | 3 +-- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/scilpy/io/utils.py b/scilpy/io/utils.py index f3a12746a7..c95bfcbc89 100644 --- a/scilpy/io/utils.py +++ b/scilpy/io/utils.py @@ -231,6 +231,11 @@ def add_force_b0_arg(parser): def add_verbose_arg(parser): parser.add_argument('-v', action='store_true', dest='verbose', help='If set, produces verbose output.') + + version = importlib.metadata.version('scilpy') + + logging.getLogger().setLevel(logging.INFO) + logging.info("Scilpy version: {}".format(version)) def add_bbox_arg(parser): @@ -879,10 +884,3 @@ def get_default_screenshotting_data(args): masks_colors -def add_scilpy_version(verbose = True): - - version = importlib.metadata.version('scilpy') - - logging.getLogger().setLevel(logging.INFO) - logging.info("Scilpy version: {}".format(version)) - diff --git a/scripts/scil_search_keywords.py b/scripts/scil_search_keywords.py index 1cf8ed38eb..e9a23d9a6c 100755 --- a/scripts/scil_search_keywords.py +++ b/scripts/scil_search_keywords.py @@ -22,7 +22,7 @@ import numpy as np -from scilpy.io.utils import add_scilpy_version, add_verbose_arg +from scilpy.io.utils import add_verbose_arg RED = '\033[31m' BOLD = '\033[1m' @@ -42,7 +42,6 @@ def _build_arg_parser(): 'instead of looking only at the docstring. 
(warning: ' 'much slower).') - add_scilpy_version() add_verbose_arg(p) return p From 18b17969ce3e653acc8b35e6f138b880c5286449 Mon Sep 17 00:00:00 2001 From: ThoumyreStanislas Date: Fri, 15 Dec 2023 11:20:53 -0500 Subject: [PATCH 36/63] correction pep8 --- scilpy/io/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scilpy/io/utils.py b/scilpy/io/utils.py index c95bfcbc89..566b8bdb3f 100644 --- a/scilpy/io/utils.py +++ b/scilpy/io/utils.py @@ -231,7 +231,7 @@ def add_force_b0_arg(parser): def add_verbose_arg(parser): parser.add_argument('-v', action='store_true', dest='verbose', help='If set, produces verbose output.') - + version = importlib.metadata.version('scilpy') logging.getLogger().setLevel(logging.INFO) From 440871f67efc2796f3418f4cccff9711b96022d5 Mon Sep 17 00:00:00 2001 From: CHrlS98 Date: Fri, 15 Dec 2023 11:44:53 -0500 Subject: [PATCH 37/63] Unit tests for reconst.bingham --- scilpy/reconst/tests/test_bingham.py | 63 +++++++-- scilpy/tests/arrays.py | 196 +++++++++++++++++++++++++++ 2 files changed, 247 insertions(+), 12 deletions(-) diff --git a/scilpy/reconst/tests/test_bingham.py b/scilpy/reconst/tests/test_bingham.py index a4f8550542..ae672770a0 100644 --- a/scilpy/reconst/tests/test_bingham.py +++ b/scilpy/reconst/tests/test_bingham.py @@ -1,31 +1,70 @@ # -*- coding: utf-8 -*- +import numpy as np +from dipy.core.sphere import hemi_icosahedron + +from scilpy.tests.arrays import (fodf_3x3_order8_descoteaux07, + fodf_3x3_bingham, fodf_3x3_bingham_sf, + fodf_3x3_bingham_fd, fodf_3x3_bingham_fs, + fodf_3x3_bingham_ff, fodf_3x3_bingham_peaks) +from scilpy.reconst.bingham import (bingham_fit_sh, bingham_to_sf, + bingham_to_peak_direction, + compute_fiber_density, + compute_fiber_fraction, + compute_fiber_spread) def test_bingham_to_sf(): - # toDO - pass + in_bingham = fodf_3x3_bingham.copy() + sphere = hemi_icosahedron + + sf = bingham_to_sf(in_bingham, sphere.vertices) + assert np.allclose(sf, fodf_3x3_bingham_sf) def test_bingham_to_peak_direction(): - # toDO - pass + bingham = fodf_3x3_bingham.copy() + + peaks = bingham_to_peak_direction(bingham) + assert np.allclose(peaks, fodf_3x3_bingham_peaks) def test_bingham_fit_sh(): - # toDO - pass + in_sh = fodf_3x3_order8_descoteaux07.copy() + max_lobes = 3 + abs_th = 0.0 + rel_th = 0.1 + min_sep_angle = 25 + max_fit_angle = 15 + mask = None + nbr_processes = 1 + + bingham_arr = bingham_fit_sh(in_sh, max_lobes, abs_th, rel_th, + min_sep_angle, max_fit_angle, mask, + nbr_processes) + + assert np.allclose(bingham_arr, fodf_3x3_bingham) def test_compute_fiber_density(): - # toDO - pass + bingham = fodf_3x3_bingham.copy() + m = 50 + mask = None + nbr_processes = 1 + + fd = compute_fiber_density(bingham, m, mask, nbr_processes) + assert np.allclose(fodf_3x3_bingham_fd, fd) def test_compute_fiber_spread(): - # toDO - pass + fd = fodf_3x3_bingham_fd.copy() + bingham = fodf_3x3_bingham.copy() + + fs = compute_fiber_spread(bingham, fd) + assert np.allclose(fs, fodf_3x3_bingham_fs) def test_compute_fiber_fraction(): - # toDO - pass + fd = fodf_3x3_bingham_fd.copy() + + ff = compute_fiber_fraction(fd) + assert np.allclose(ff, fodf_3x3_bingham_ff) diff --git a/scilpy/tests/arrays.py b/scilpy/tests/arrays.py index 8def308349..6d3985a5ab 100644 --- a/scilpy/tests/arrays.py +++ b/scilpy/tests/arrays.py @@ -243,6 +243,202 @@ 2.45072224e-04, 2.64311350e-04, -1.81628898e-04, 6.73057957e-04]]]]) +# Bingham fit +fodf_3x3_bingham = np.array([[ + [[[0.8865956, -1.01305598, 0.3570712, 4.31487453, -4.09539035, + 1.50380415, 
-1.08597013], + [0., 0., 0., 0., 0., 0., 0.], + [0., 0., 0., 0., 0., 0., 0.]]], + + [[[0.85683347, -3.44257945, 0.24681098, 2.7063325, 3.0255078, + -0.39132021, 3.88427259], + [0.09353532, -2.07887656, -0.23646431, 2.54845777, 3.35888907, + -12.87836086, 1.54502972], + [0., 0., 0., 0., 0., 0., 0.]]], + + [[[0.95034369, 3.82616343, 0.66640788, 2.10370133, -2.29257285, + -0.44777004, 4.31152262], + [0., 0., 0., 0., 0., 0., 0.], + [0., 0., 0., 0., 0., 0., 0.]]]], + + [[[[0.18908378, -1.3861731, 3.03919906, -1.22601547, 1.29995979, + 2.13770894, 3.82944086], + [0.12418191, 2.74008198, -0.06444388, -1.57097804, 3.1965494, + 0.87367118, 5.53954571], + [0.12104527, 3.61536971, -2.68105852, -1.64809093, 3.14649051, + 4.40167953, -0.25813741]]], + + [[[0.27522583, -3.48810133, 0.8774683, 1.78047152, 2.43352883, + -0.35692355, 4.94340074], + [0.20108258, 4.56868628, 3.66376182, 0.03988314, -3.4448441, + 4.25196219, 4.01761794], + [0.16118525, 1.50428971, 2.4830825, 0.17002741, 1.20848685, + -1.22853768, 7.24969027]]], + + [[[0.2612878, 4.36835975, 2.85751351, 2.79826519, -2.49064595, + -1.38304902, 5.30046931], + [0.23787563, -0.23406115, -0.25916585, 3.62612255, 5.29582601, + -3.54583996, 0.08841027], + [0.14373517, 2.05462185, 1.39279777, 0.56035226, 3.45503345, + -6.17007375, 2.66774636]]]], + + [[[[0., 0., 0., 0., 0., 0., 0.], + [0., 0., 0., 0., 0., 0., 0.], + [0., 0., 0., 0., 0., 0., 0.]]], + + [[[0., 0., 0., 0., 0., 0., 0.], + [0., 0., 0., 0., 0., 0., 0.], + [0., 0., 0., 0., 0., 0., 0.]]], + + [[[0., 0., 0., 0., 0., 0., 0.], + [0., 0., 0., 0., 0., 0., 0.], + [0., 0., 0., 0., 0., 0., 0.]]]]]) + +# Bingham array as spherical functions +fodf_3x3_bingham_sf = np.array([[[[ + [1.59484877e-01, 1.18932451e-02, 1.17911602e-02, 1.19221973e-02, + 1.79084769e-01, 1.73800247e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00]]], + + [[[4.61621144e-02, 2.12595934e-02, 6.34163521e-03, 1.00096474e-02, + 2.58251608e-01, 1.95556145e-01], + [1.00636347e-02, 4.07760320e-05, 3.24542499e-02, 3.66369494e-03, + 2.92196637e-05, 1.47902411e-06], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00]]], + + [[[1.89202646e-02, 7.72759893e-02, 1.04786067e-02, 7.41384901e-03, + 2.44206672e-01, 2.13048219e-01], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00]]]], + + [[[[6.07104883e-02, 2.16483588e-02, 2.62426053e-03, 4.32045162e-02, + 2.64859330e-03, 9.99567222e-03], + [4.88698419e-03, 9.58808359e-03, 2.20309972e-04, 2.60379323e-03, + 1.22570327e-02, 4.96320594e-02], + [6.83268085e-04, 1.77794115e-03, 7.85952079e-02, 6.50364754e-03, + 1.38528228e-03, 4.95163615e-03]]], + + [[[3.03020624e-02, 5.84492588e-03, 1.15539675e-03, 3.57417939e-03, + 5.23586737e-02, 5.93138461e-02], + [5.84873154e-04, 2.04800390e-03, 4.99198602e-02, 1.37601372e-03, + 2.97576042e-04, 2.80472189e-02], + [1.58809156e-02, 1.10644235e-01, 2.39822852e-04, 2.29946460e-03, + 1.09010927e-02, 1.66605591e-03]]], + + [[[6.85402639e-04, 8.28014763e-02, 1.18689001e-03, 9.40716640e-04, + 1.29906371e-02, 1.64458673e-02], + [7.72311584e-02, 4.09197783e-04, 5.93833482e-03, 4.46406166e-03, + 2.73251646e-02, 1.57738201e-02], + [1.26632583e-02, 6.09482433e-04, 6.09099345e-03, 1.21231123e-01, + 8.59146396e-03, 
3.02273873e-04]]]], + + [[[[0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00]]], + + [[[0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00]]], + + [[[0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00], + [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, + 0.00000000e+00, 0.00000000e+00]]]]]) + +fodf_3x3_bingham_fd = np.array([ + [[[1.47125431, 0., 0.]], + [[1.35616136, 0.10115714, 0.]], + [[1.50423449, 0., 0.]]], + + [[[0.35457353, 0.2050584, 0.17187861]], + [[0.43043627, 0.22323813, 0.25806502]], + [[0.30886279, 0.36314695, 0.24672844]]], + + [[[0., 0., 0.]], + [[0., 0., 0.]], + [[0., 0., 0.]]]] +) + +fodf_3x3_bingham_fs = np.array([ + [[[1.65944238, 0., 0.]], + [[1.58275955, 1.08148601, 0.]], + [[1.58283209, 0., 0.]]], + + [[[1.87521918, 1.65127433, 1.41995313]], + [[1.56393849, 1.11018135, 1.60104613]], + [[1.18207888, 1.52662528, 1.71654884]]], + + [[[0., 0., 0.]], + [[0., 0., 0.]], + [[0., 0., 0.]]]] +) + +fodf_3x3_bingham_ff = np.array([ + [[[1., 0., 0.]], + [[0.9305868, 0.0694132, 0.]], + [[1., 0., 0.]]], + + [[[0.48471418, 0.28032187, 0.23496396]], + [[0.47210449, 0.24484861, 0.2830469]], + [[0.33618151, 0.39526707, 0.26855142]]], + + [[[0., 0., 0.]], + [[0., 0., 0.]], + [[0., 0., 0.]]]] +) + +fodf_3x3_bingham_peaks = np.array([ + [[[[-0.34397547, -0.93897366, -0.00305593], + [0., 0., 0.], + [0., 0., 0.]]], + + [[[0.09314371, 0.9952668, 0.02771716], + [0.73461195, 0.26645789, 0.62397555], + [0., 0., 0.]]], + + [[[0.17614917, -0.98432624, -0.0085624], + [0., 0., 0.], + [0., 0., 0.]]]], + + [[[[0.87605833, 0.22821013, -0.42478458], + [0.04979924, -0.99058828, 0.12749469], + [0.30605549, -0.16378166, 0.9378196]]], + + [[[0.2244257, 0.97366253, -0.04017949], + [0.3659656, -0.46513121, 0.80605344], + [0.84032072, -0.49375814, -0.22374984]]], + + [[[0.53356359, -0.84522153, 0.03017378], + [0.5527576, 0.82792627, 0.09485319], + [0.37296054, -0.18433035, -0.90935293]]]], + + [[[[0., 0., 0.], + [0., 0., 0.], + [0., 0., 0.]]], + + [[[0., 0., 0.], + [0., 0., 0.], + [0., 0., 0.]]], + + [[[0., 0., 0.], + [0., 0., 0.], + [0., 0., 0.]]]]] +) + # 3D array with slices 2-7 with values from 1-6 (with borders of 0) ref_in_labels = np.zeros((10, 10, 10), dtype=np.uint16) for i in range(2, 8): From 93a0e7dae6594447fc4921513f17c9566e9d3a01 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Fri, 15 Dec 2023 11:54:09 -0500 Subject: [PATCH 38/63] extend cov report --- Jenkinsfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 57a7f6079e..1be9c3853e 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -24,13 +24,14 @@ pipeline { steps { withPythonEnv('CPython-3.10') { sh ''' + pip3 install pytest-cov pip3 install wheel==0.38.* pip3 install numpy==1.23.* pip3 install packaging==23.* pip3 install -e . 
export MPLBACKEND="agg" export OPENBLAS_NUM_THREADS=1 - pytest -v + pytest --cov-report=xml --cov-report term-missing:skip-covered --cov=scilpy ''' } } From 0a0232c3d5b7a360b6237293f138db8e32f3cb84 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Fri, 15 Dec 2023 12:25:41 -0500 Subject: [PATCH 39/63] prepare for coverage report --- .coveragerc | 20 ++++++++++++++++++++ Jenkinsfile | 2 +- pytest.ini | 16 +++++++++++++++- 3 files changed, 36 insertions(+), 2 deletions(-) create mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000000..10530e8c55 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,20 @@ +[run] +branch = True +concurrency = multiprocessing +data_file = .test_reports/.coverage +source = + scilpy/ + scripts/ +omit = + scripts/tests/*.py + scilpy/tests/**/*.py + scilpy/**/tests/*.py + scilpy/**/tests/**/*.py + scripts/tests/*.py + scripts/tests/**/*.py + +[report] +skip_empty = True + +[html] +title = Scilpy Coverage Report diff --git a/Jenkinsfile b/Jenkinsfile index 1be9c3853e..f9679cd7f7 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -24,7 +24,7 @@ pipeline { steps { withPythonEnv('CPython-3.10') { sh ''' - pip3 install pytest-cov + pip3 install pytest-cov pytest-html pip3 install wheel==0.38.* pip3 install numpy==1.23.* pip3 install packaging==23.* diff --git a/pytest.ini b/pytest.ini index 3f077269ed..a4f1b0df86 100644 --- a/pytest.ini +++ b/pytest.ini @@ -21,4 +21,18 @@ filterwarnings = once:::statsmodels once:::dmri-commit once:::cvxpy - once:::dmri-amico \ No newline at end of file + once:::dmri-amico + +required_plugins = + pytest-console-scripts + pytest-mock + +junit_logging = out-err + +addopts = + --html=.test_reports/pytest.html + --cov-report=html:.test_reports/coverage.html + --junit-xml=.test_reports/junit.xml + --cov-report=xml:.test_reports/coverage.xml + --cov=scilpy/ + --cov=scripts/ From 204383cfc34189e939e240d7598b259dcb22b87a Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Fri, 15 Dec 2023 13:50:28 -0500 Subject: [PATCH 40/63] update requirements --- requirements.txt | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/requirements.txt b/requirements.txt index 39301a8966..cdbdfba356 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -bids-validator==1.9.* +bids-validator==1.11.* bctpy==0.5.* bz2file==0.98.* coloredlogs==15.0.* @@ -18,13 +18,13 @@ h5py==3.7.* joblib==1.2.* kiwisolver==1.4.* matplotlib==3.6.* -nibabel==4.0.* +nibabel==5.2.* nilearn==0.9.* numpy==1.23.* openpyxl==3.0.* packaging == 23.2.* Pillow==10.0.* -pybids==0.15.* +pybids==0.16.* pyparsing==3.0.* PySocks==1.7.* pytest==7.2.* @@ -40,6 +40,3 @@ spams==2.6.* statsmodels==0.13.* trimeshpy==0.0.3 vtk==9.2.* -# Dipy requirements -h5py>=2.8.0 -tqdm>=4.30.0 \ No newline at end of file From 65188e69a5fcac3e4d16bad28b4f5943a5c1aabb Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Fri, 15 Dec 2023 13:56:01 -0500 Subject: [PATCH 41/63] Update following Alex comments --- Jenkinsfile | 8 +++++++- pytest.ini | 2 ++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index f9679cd7f7..dde807425e 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -31,9 +31,15 @@ pipeline { pip3 install -e . 
                    export MPLBACKEND="agg"
                    export OPENBLAS_NUM_THREADS=1
-                    pytest --cov-report=xml --cov-report term-missing:skip-covered --cov=scilpy
+                    pytest --cov-report term-missing:skip-covered
                '''
            }
+            discoverGitReferenceBuild()
+            recordCoverage(
+                name: 'Scilpy Coverage Report',
+                sourceCodeRetention: 'MODIFIED',
+                tools: [[parser: 'COBERTURA',
+                         pattern: '**/.test_reports/coverage.xml']])
        }
    }
 
diff --git a/pytest.ini b/pytest.ini
index a4f1b0df86..5a7cbe1753 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -26,6 +26,8 @@ filterwarnings =
 required_plugins =
     pytest-console-scripts
     pytest-mock
+    pytest-html
+    pytest-cov
 
 junit_logging = out-err

From b364f723bd7ce3913d78884906b64c3a9c8ba529 Mon Sep 17 00:00:00 2001
From: arnaudbore
Date: Fri, 15 Dec 2023 14:56:29 -0500
Subject: [PATCH 42/63] fix requirements.txt

---
 requirements.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index cdbdfba356..e0b8294f8d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,7 +2,7 @@ bids-validator==1.11.*
 bctpy==0.5.*
 bz2file==0.98.*
 coloredlogs==15.0.*
-cvxpy==1.3.*
+cvxpy==1.4.*
 cycler==0.11.*
 Cython==0.29.*, !=0.29.29
 dipy==1.8.*
@@ -11,7 +11,7 @@ dmri-amico==1.5.*
 dmri-commit==1.6.*
 docopt==0.6.*
 formulaic==0.3.*
-fury==0.8.*
+fury==0.9.*
 future==0.18.*
 GitPython==3.1.*
 h5py==3.7.*

From 74a6239d9d525f447113a55ba95ccabef907ffb2 Mon Sep 17 00:00:00 2001
From: arnaudbore
Date: Fri, 15 Dec 2023 14:58:17 -0500
Subject: [PATCH 43/63] Alex knows what to do

---
 Jenkinsfile | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/Jenkinsfile b/Jenkinsfile
index dde807425e..1c4fda6f8c 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -63,6 +63,13 @@ pipeline {
                    pullRequest.createReviewRequests(['frheault'])
                }
            }
+            xunit(
+                checksName: '',
+                tools: [JUnit(excludesPattern: '', failIfNotNew: false,
+                              pattern: '**/.test_reports/junit.xml',
+                              skipNoTestFiles: true,
+                              stopProcessingIfError: true)]
+            )
        }
    }
    failure {

From fa2ed9f58bff8b84c6656dbe74d694abb5d0617a Mon Sep 17 00:00:00 2001
From: arnaudbore
Date: Fri, 15 Dec 2023 16:12:12 -0500
Subject: [PATCH 44/63] add requirements for local testing

---
 requirements.txt | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/requirements.txt b/requirements.txt
index 4122bc8a4f..7cf24a99ea 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -29,6 +29,8 @@ pyparsing==3.0.*
 PySocks==1.7.*
 pytest==7.2.*
 pytest-console-scripts==1.3.*
+pytest-cov==4.1.0
+pytest-html==4.1.1
 pytest-mock==3.10.*
 python-dateutil==2.8.*
 pytz==2022.6.*

From 2baa1d7d188f7950735b345f8e8a3fe7ff61f920 Mon Sep 17 00:00:00 2001
From: EmmaRenauld
Date: Thu, 14 Dec 2023 11:29:14 -0500
Subject: [PATCH 45/63] Rename the two header files

---
 scripts/legacy/scil_print_header.py | 20 ++++++++++++++++++
 ...l_verify_space_attributes_compatibility.py | 21 +++++++++++++++++++
 ...nt_header.py => scil_header_print_info.py} | 0
 ...
=> scil_header_validate_compatibility.py} | 0 ...nt_header.py => test_header_print_info.py} | 12 +++++------ ...y => test_header_validate_comptability.py} | 11 ++++------ 6 files changed, 50 insertions(+), 14 deletions(-) create mode 100644 scripts/legacy/scil_print_header.py create mode 100644 scripts/legacy/scil_verify_space_attributes_compatibility.py rename scripts/{scil_print_header.py => scil_header_print_info.py} (100%) rename scripts/{scil_verify_space_attributes_compatibility.py => scil_header_validate_compatibility.py} (100%) rename scripts/tests/{test_print_header.py => test_header_print_info.py} (62%) rename scripts/tests/{test_verify_space_attributes_compatibility.py => test_header_validate_comptability.py} (58%) diff --git a/scripts/legacy/scil_print_header.py b/scripts/legacy/scil_print_header.py new file mode 100644 index 0000000000..6aa4274192 --- /dev/null +++ b/scripts/legacy/scil_print_header.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_header_print_info import main as new_main + + +DEPRECATION_MSG = """ +This script has been renamed scil_header_print_info.py. +Please change your existing pipelines accordingly. +""" + + +@deprecate_script("scil_print_header.py", DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() diff --git a/scripts/legacy/scil_verify_space_attributes_compatibility.py b/scripts/legacy/scil_verify_space_attributes_compatibility.py new file mode 100644 index 0000000000..b7525f41dd --- /dev/null +++ b/scripts/legacy/scil_verify_space_attributes_compatibility.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_header_validate_compatibility import main as new_main + + +DEPRECATION_MSG = """ +This script has been renamed scil_header_validate_compatibility.py. +Please change your existing pipelines accordingly. 
+""" + + +@deprecate_script("scil_verify_space_attributes_compatibility.py", + DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() diff --git a/scripts/scil_print_header.py b/scripts/scil_header_print_info.py similarity index 100% rename from scripts/scil_print_header.py rename to scripts/scil_header_print_info.py diff --git a/scripts/scil_verify_space_attributes_compatibility.py b/scripts/scil_header_validate_compatibility.py similarity index 100% rename from scripts/scil_verify_space_attributes_compatibility.py rename to scripts/scil_header_validate_compatibility.py diff --git a/scripts/tests/test_print_header.py b/scripts/tests/test_header_print_info.py similarity index 62% rename from scripts/tests/test_print_header.py rename to scripts/tests/test_header_print_info.py index c401277bab..93d12d8b57 100644 --- a/scripts/tests/test_print_header.py +++ b/scripts/tests/test_header_print_info.py @@ -12,21 +12,19 @@ def test_help_option(script_runner): - ret = script_runner.run('scil_print_header.py', '--help') + ret = script_runner.run('scil_header_print_info.py', '--help') assert ret.success def test_execution_img(script_runner): os.chdir(os.path.expanduser(tmp_dir.name)) - in_img = os.path.join(get_home(), 'others', - 'fa.nii.gz') - ret = script_runner.run('scil_print_header.py', in_img) + in_img = os.path.join(get_home(), 'others', 'fa.nii.gz') + ret = script_runner.run('scil_header_print_info.py', in_img) assert ret.success def test_execution_tractogram(script_runner): os.chdir(os.path.expanduser(tmp_dir.name)) - in_tracto = os.path.join(get_home(), 'others', - 'IFGWM.trk') - ret = script_runner.run('scil_print_header.py', in_tracto) + in_tracto = os.path.join(get_home(), 'others', 'IFGWM.trk') + ret = script_runner.run('scil_header_print_info.py', in_tracto) assert ret.success diff --git a/scripts/tests/test_verify_space_attributes_compatibility.py b/scripts/tests/test_header_validate_comptability.py similarity index 58% rename from scripts/tests/test_verify_space_attributes_compatibility.py rename to scripts/tests/test_header_validate_comptability.py index 1cf9720dc8..86b682da07 100644 --- a/scripts/tests/test_verify_space_attributes_compatibility.py +++ b/scripts/tests/test_header_validate_comptability.py @@ -13,17 +13,14 @@ def test_help_option(script_runner): - ret = script_runner.run('scil_verify_space_attributes_compatibility.py', - '--help') + ret = script_runner.run('scil_header_validate_comptability.py', '--help') assert ret.success def test_execution_filtering(script_runner): os.chdir(os.path.expanduser(tmp_dir.name)) - in_bundle = os.path.join(get_home(), 'filtering', - 'bundle_all_1mm.trk') - in_roi = os.path.join(get_home(), 'filtering', - 'mask.nii.gz') - ret = script_runner.run('scil_verify_space_attributes_compatibility.py', + in_bundle = os.path.join(get_home(), 'filtering', 'bundle_all_1mm.trk') + in_roi = os.path.join(get_home(), 'filtering', 'mask.nii.gz') + ret = script_runner.run('scil_header_validate_compatibility.py', in_bundle, in_roi) assert ret.success From b7ac69b92b2846867452a062ad33aeb9b25a71bd Mon Sep 17 00:00:00 2001 From: EmmaRenauld Date: Thu, 14 Dec 2023 11:35:15 -0500 Subject: [PATCH 46/63] Rename convert fdf --- scripts/legacy/scil_convert_fdf.py | 20 +++++++++++++++++++ ...convert_fdf.py => scil_dwi_convert_FDF.py} | 4 ++-- ...convert_fdf.py => test_dwi_convert_FDF.py} | 2 +- 3 files changed, 23 insertions(+), 3 deletions(-) create mode 100755 scripts/legacy/scil_convert_fdf.py rename 
scripts/{scil_convert_fdf.py => scil_dwi_convert_FDF.py} (94%) rename scripts/tests/{test_convert_fdf.py => test_dwi_convert_FDF.py} (62%) diff --git a/scripts/legacy/scil_convert_fdf.py b/scripts/legacy/scil_convert_fdf.py new file mode 100755 index 0000000000..2ce349bc6f --- /dev/null +++ b/scripts/legacy/scil_convert_fdf.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from scilpy.io.deprecator import deprecate_script +from scripts.scil_dwi_convert_FDF import main as new_main + + +DEPRECATION_MSG = """ +This script has been renamed scil_dwi_convert_FDF.py. +Please change your existing pipelines accordingly. +""" + + +@deprecate_script("scil_convert_fdf.py", DEPRECATION_MSG, '1.7.0') +def main(): + new_main() + + +if __name__ == "__main__": + main() diff --git a/scripts/scil_convert_fdf.py b/scripts/scil_dwi_convert_FDF.py similarity index 94% rename from scripts/scil_convert_fdf.py rename to scripts/scil_dwi_convert_FDF.py index 44a71b430e..223a6fe115 100755 --- a/scripts/scil_convert_fdf.py +++ b/scripts/scil_dwi_convert_FDF.py @@ -6,8 +6,8 @@ If the procpar contains diffusion information, it will be saved as bval and bvec in the same folder as the output file. - ex: scil_convert_fdf.py semsdw/b0_folder/ semsdw/dwi_folder/ dwi.nii.gz \ - --bval dwi.bval --bvec dwi.bvec -f + ex: scil_dwi_convert_FDF.py semsdw/b0_folder/ semsdw/dwi_folder/ \ + dwi.nii.gz --bval dwi.bval --bvec dwi.bvec -f """ import argparse diff --git a/scripts/tests/test_convert_fdf.py b/scripts/tests/test_dwi_convert_FDF.py similarity index 62% rename from scripts/tests/test_convert_fdf.py rename to scripts/tests/test_dwi_convert_FDF.py index d9d43e6b98..25aad8fb85 100644 --- a/scripts/tests/test_convert_fdf.py +++ b/scripts/tests/test_dwi_convert_FDF.py @@ -3,5 +3,5 @@ def test_help_option(script_runner): - ret = script_runner.run('scil_convert_fdf.py', '--help') + ret = script_runner.run('scil_dwi_convert_fdf.py', '--help') assert ret.success From 56a0a3b70853a0ce6cb6d8ea7a828d50da5b93eb Mon Sep 17 00:00:00 2001 From: EmmaRenauld Date: Fri, 15 Dec 2023 08:52:49 -0500 Subject: [PATCH 47/63] Fix typos --- scripts/tests/test_dwi_convert_FDF.py | 2 +- ...te_comptability.py => test_header_validate_compatibility.py} | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) rename scripts/tests/{test_header_validate_comptability.py => test_header_validate_compatibility.py} (90%) diff --git a/scripts/tests/test_dwi_convert_FDF.py b/scripts/tests/test_dwi_convert_FDF.py index 25aad8fb85..582473264f 100644 --- a/scripts/tests/test_dwi_convert_FDF.py +++ b/scripts/tests/test_dwi_convert_FDF.py @@ -3,5 +3,5 @@ def test_help_option(script_runner): - ret = script_runner.run('scil_dwi_convert_fdf.py', '--help') + ret = script_runner.run('scil_dwi_convert_FDF.py', '--help') assert ret.success diff --git a/scripts/tests/test_header_validate_comptability.py b/scripts/tests/test_header_validate_compatibility.py similarity index 90% rename from scripts/tests/test_header_validate_comptability.py rename to scripts/tests/test_header_validate_compatibility.py index 86b682da07..a407d20e11 100644 --- a/scripts/tests/test_header_validate_comptability.py +++ b/scripts/tests/test_header_validate_compatibility.py @@ -13,7 +13,7 @@ def test_help_option(script_runner): - ret = script_runner.run('scil_header_validate_comptability.py', '--help') + ret = script_runner.run('scil_header_validate_compatibility.py', '--help') assert ret.success From 8abd54f4c4fc116efa344ddbb55648d465cb45e6 Mon Sep 17 00:00:00 2001 From: arnaudbore 
Date: Mon, 18 Dec 2023 15:10:16 -0500 Subject: [PATCH 48/63] fix some pep8 --- scripts/scil_bids_validate.py | 46 +++++++++++++++++++++++------------ 1 file changed, 30 insertions(+), 16 deletions(-) diff --git a/scripts/scil_bids_validate.py b/scripts/scil_bids_validate.py index 190108a579..b1dfee5f39 100755 --- a/scripts/scil_bids_validate.py +++ b/scripts/scil_bids_validate.py @@ -7,7 +7,8 @@ - DWI/rev_DWI - T1 - fmap/sbref (based on IntendedFor entity) -- Freesurfer (optional - one per participant) +- Freesurfer (optional - could be one per participant + or one per participant/session) The BIDS dataset MUST be homogeneous. The metadata need to be uniform across all participants/sessions/runs @@ -59,7 +60,8 @@ def _build_arg_parser(): p.add_argument('--bids_ignore', help="If you want to ignore some subjects or some files, " "you can provide an extra bidsignore file." - "Check: https://github.com/bids-standard/bids-validator#bidsignore") + "Check: https://github.com/bids-standard" + "/bids-validator#bidsignore") p.add_argument("--fs", help='Output freesurfer path. It will add keys wmparc and ' @@ -105,8 +107,9 @@ def _load_bidsignore_(bids_root, additional_bidsignore=None): return tuple() -def get_opposite_phase_encoding_direction(phase_encoding_direction): - """ Return opposite direction (works with direction or PhaseEncodingDirection) +def get_opposite_pe_direction(phase_encoding_direction): + """ Return opposite direction (works with direction + or PhaseEncodingDirection) Parameters ---------- @@ -220,7 +223,7 @@ def get_data(layout, nSub, dwis, t1s, fs, default_readout, clean): related_files_suffixes = [] for curr_related in related_files: related_files_suffixes.append(curr_related.entities['suffix']) - if dwi_direction == get_opposite_phase_encoding_direction(curr_related.entities[direction_key]): + if dwi_direction == get_opposite_pe_direction(curr_related.entities[direction_key]): PE[1] = conversion[curr_related.entities[direction_key]] topup_suffix[curr_related.entities['suffix']][1] = curr_related.path else: @@ -228,10 +231,12 @@ def get_data(layout, nSub, dwis, t1s, fs, default_readout, clean): if related_files_suffixes.count('epi') > 2 or related_files_suffixes.count('sbref') > 2: topup_suffix = {'epi': ['', ''], 'sbref': ['', '']} - logging.warning('Too many files pointing to {}.'.format(dwis[0].path)) + logging.warning("Too many files " + "pointing to {}.".format(dwis[0].path)) else: topup = ['', ''] - logging.warning('IntendedFor: No file pointing to {}'.format(dwis[0].path)) + logging.warning("IntendedFor: No file" + " pointing to {}".format(dwis[0].path)) if len(dwis) == 2: if not any(s == '' for s in topup_suffix['sbref']): @@ -246,7 +251,8 @@ def get_data(layout, nSub, dwis, t1s, fs, default_readout, clean): if topup_suffix['epi'][1] != '': topup = topup_suffix['epi'] elif not any(s == '' for s in topup_suffix['sbref']): - logging.warning("You have two sbref but only one dwi this scheme is not accepted.") + logging.warning("You have two sbref but " + "only one dwi this scheme is not accepted.") topup = ['', ''] else: topup = ['', ''] @@ -259,13 +265,18 @@ def get_data(layout, nSub, dwis, t1s, fs, default_readout, clean): return {} if not any(s == '' for s in topup): - logging.info("Found rev b0 and b0 images to correct for geometrical distorsion") + logging.info("Found rev b0 and b0 images " + "to correct for geometrical distorsion") elif not topup[1]: - logging.warning("No rev image found to correct for geometrical distorsion") + logging.warning("No rev image found " + 
"to correct for geometrical distorsion") elif topup[1]: - logging.info("Found rev b0 to correct for geometrical distorsion") + logging.info("Found rev b0 to correct " + "for geometrical distorsion") else: - logging.warning("Only found one b0 with same PhaseEncodedDirection won't be enough to correct for geometrical distorsion") + logging.warning("Only found one b0 with same " + "PhaseEncodedDirection won't be enough to " + "correct for geometrical distorsion") # T1 setup t1_path = 'todo' @@ -359,7 +370,8 @@ def associate_dwis(layout, nSub): layout.get(part='mag', **base_dict)] if len(phaseEncodingDirection) > 2 or len(directions) > 2: - logging.warning("These acquisitions have too many encoding directions.") + logging.warning("These acquisitions have " + "too many encoding directions.") return [] all_dwis = layout.get(part=Query.NONE, @@ -401,7 +413,7 @@ def associate_dwis(layout, nSub): direction = 'PhaseEncodingDirection' if direction: - rev_curr_entity[direction] = get_opposite_phase_encoding_direction(rev_curr_entity[direction]) + rev_curr_entity[direction] = get_opposite_pe_direction(rev_curr_entity[direction]) if rev_curr_entity == rev_dwi.get_entities(): curr_association.append(rev_dwi) rev_iter_to_rm.append(iter_rev) @@ -421,7 +433,8 @@ def associate_dwis(layout, nSub): if len(curr_association) < 3: all_associated_dwis.append(curr_association) else: - logging.warning("These acquisitions have too many associated dwis.") + logging.warning("These acquisitions have " + "too many associated dwis.") del all_dwis[0] if len(all_rev_dwis): @@ -473,7 +486,8 @@ def main(): fs_sub_path = test_fs_sub_path elif 'session' in dwi[0].entities: nSess = dwi[0].entities['session'] - test_fs_sub_path = os.path.join(abs_fs, 'sub-' + nSub + '_ses-' + nSess) + test_fs_sub_path = os.path.join(abs_fs, + 'sub-' + nSub + '_ses-' + nSess) if os.path.exists(test_fs_sub_path): fs_sub_path = test_fs_sub_path From 8d01c445bd93a790505ccfce589487dbf758b41d Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Tue, 19 Dec 2023 10:33:37 -0500 Subject: [PATCH 49/63] improve bids validation with fs --- scripts/scil_bids_validate.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/scripts/scil_bids_validate.py b/scripts/scil_bids_validate.py index b1dfee5f39..8846052506 100755 --- a/scripts/scil_bids_validate.py +++ b/scripts/scil_bids_validate.py @@ -353,25 +353,29 @@ def associate_dwis(layout, nSub): directions.sort() if not directions and 'PhaseEncodingDirection' in layout.get_entities(): - logging.info("Found no directions.") + logging.info("Found no directions") directions = [Query.ANY, Query.ANY] phaseEncodingDirection = layout.get_PhaseEncodingDirection(**base_dict) if len(phaseEncodingDirection) == 1: - logging.info("Found one phaseEncodingDirection.") + logging.info("Found one phaseEncodingDirection") return [[el] for el in layout.get(part=Query.NONE, **base_dict) + layout.get(part='mag', **base_dict)] + elif len(phaseEncodingDirection) == 0: + logging.warning("PhaseEncodingDirection exists in this " + "dataset, but no DWI was found") + return [] elif len(directions) == 1: logging.info("Found one direction.") return [[el] for el in layout.get(part=Query.NONE, **base_dict) + layout.get(part='mag', **base_dict)] elif not directions: - logging.info("Found no directions or PhaseEncodingDirections.") + logging.info("Found no directions or PhaseEncodingDirections") return [[el] for el in layout.get(part=Query.NONE, **base_dict) + layout.get(part='mag', **base_dict)] if 
len(phaseEncodingDirection) > 2 or len(directions) > 2: logging.warning("These acquisitions have " - "too many encoding directions.") + "too many encoding directions") return [] all_dwis = layout.get(part=Query.NONE, @@ -494,7 +498,8 @@ def main(): if fs_sub_path: t1_fs = glob(os.path.join(fs_sub_path, 'mri/T1.mgz')) wmparc = glob(os.path.join(fs_sub_path, 'mri/wmparc.mgz')) - aparc_aseg = glob(os.path.join(fs_sub_path, 'mri/aparc+aseg.mgz')) + aparc_aseg = glob(os.path.join(fs_sub_path, + 'mri/aparc+aseg.mgz')) if len(t1_fs) == 1 and len(wmparc) == 1 and len(aparc_aseg) == 1: fs_inputs = [t1_fs[0], wmparc[0], aparc_aseg[0]] From 0ac6af3a944ee281e50618f4527c1c2e322011d1 Mon Sep 17 00:00:00 2001 From: Antoine Theberge Date: Wed, 20 Dec 2023 13:24:29 -0500 Subject: [PATCH 50/63] ENH: output handling --- scripts/scil_gradients_convert.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/scil_gradients_convert.py b/scripts/scil_gradients_convert.py index 90b5b06300..92af320e0d 100755 --- a/scripts/scil_gradients_convert.py +++ b/scripts/scil_gradients_convert.py @@ -25,8 +25,9 @@ def _build_arg_parser(): '(.bval, .bvec) or MRtrix (.b).') p.add_argument('output', type=str, - help='Path to output file(s) without extension. Either ' - 'FSL (output.bval, output.bvec) or MRtrix (output.b).') + help='Basename of output without extension. Extension(s) ' + 'will be added automatically (.b for MRtrix, ' + '.bval/.bvec for FSL.') grad_format_group = p.add_mutually_exclusive_group(required=True) grad_format_group.add_argument('--input_fsl', action='store_true', @@ -59,8 +60,7 @@ def main(): fsl2mrtrix(fsl_bval, fsl_bvec, args.output) else: output = [args.output + '.bval', args.output + '.bvec'] - assert_outputs_exist(parser, args, output[0]) - assert_outputs_exist(parser, args, output[1]) + assert_outputs_exist(parser, args, output) mrtrix_b = args.gradients[0] mrtrix2fsl(mrtrix_b, args.output) From 51646ce3da750c886bbe21cadc8c5eb1ed008832 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 20 Dec 2023 14:09:55 -0500 Subject: [PATCH 51/63] 4th round of help + pep8 --- scripts/scil_bundle_mean_fixel_bingham_metric.py | 4 +++- scripts/scil_dwi_convert_FDF.py | 12 +++++++----- scripts/scil_header_print_info.py | 2 ++ scripts/scil_header_validate_compatibility.py | 2 ++ scripts/scil_json_convert_entries_to_xlsx.py | 5 ++++- scripts/scil_json_harmonize_entries.py | 2 ++ scripts/scil_json_merge_entries.py | 2 ++ scripts/scil_tractogram_print_info.py | 2 +- 8 files changed, 23 insertions(+), 8 deletions(-) diff --git a/scripts/scil_bundle_mean_fixel_bingham_metric.py b/scripts/scil_bundle_mean_fixel_bingham_metric.py index 22f7e1309f..efe3021da2 100755 --- a/scripts/scil_bundle_mean_fixel_bingham_metric.py +++ b/scripts/scil_bundle_mean_fixel_bingham_metric.py @@ -21,6 +21,8 @@ current streamline segment. Please use a bundle file rather than a whole tractogram. + +Formerly: scil_compute_mean_fixel_obe_metric_from_bundles.py """ import argparse @@ -61,7 +63,7 @@ def _build_arg_parser(): add_reference_arg(p) add_verbose_arg(p) add_overwrite_arg(p) - + return p diff --git a/scripts/scil_dwi_convert_FDF.py b/scripts/scil_dwi_convert_FDF.py index 223a6fe115..ece390e94a 100755 --- a/scripts/scil_dwi_convert_FDF.py +++ b/scripts/scil_dwi_convert_FDF.py @@ -2,12 +2,14 @@ # -*- coding: utf-8 -*- """ - Converts a Varian FDF file or directory to a nifti file. 
- If the procpar contains diffusion information, it will be saved as bval and - bvec in the same folder as the output file. +Converts a Varian FDF file or directory to a nifti file. +If the procpar contains diffusion information, it will be saved as bval and +bvec in the same folder as the output file. - ex: scil_dwi_convert_FDF.py semsdw/b0_folder/ semsdw/dwi_folder/ \ - dwi.nii.gz --bval dwi.bval --bvec dwi.bvec -f +ex: scil_dwi_convert_FDF.py semsdw/b0_folder/ semsdw/dwi_folder/ \ + dwi.nii.gz --bval dwi.bval --bvec dwi.bvec -f + +Formerly: scil_convert_fdf.py """ import argparse diff --git a/scripts/scil_header_print_info.py b/scripts/scil_header_print_info.py index 6d21731ce3..b6fcce7132 100755 --- a/scripts/scil_header_print_info.py +++ b/scripts/scil_header_print_info.py @@ -4,6 +4,8 @@ """ Print the raw header from the provided file or only the specified keys. Support trk, nii and mgz files. + +Formerly: scil_print_header.py """ import argparse diff --git a/scripts/scil_header_validate_compatibility.py b/scripts/scil_header_validate_compatibility.py index 643e93f2a8..3c7cab6139 100755 --- a/scripts/scil_header_validate_compatibility.py +++ b/scripts/scil_header_validate_compatibility.py @@ -6,6 +6,8 @@ of their spatial attributes. Spatial attributes are: affine, dimensions, voxel sizes and voxel order. + +Formerly: scil_verify_space_attributes_compatibility.py """ import argparse diff --git a/scripts/scil_json_convert_entries_to_xlsx.py b/scripts/scil_json_convert_entries_to_xlsx.py index b2d0ea6bb2..7a84615e78 100755 --- a/scripts/scil_json_convert_entries_to_xlsx.py +++ b/scripts/scil_json_convert_entries_to_xlsx.py @@ -1,8 +1,11 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -""" Convert a final aggregated json file to an Excel spreadsheet. +""" +Convert a final aggregated json file to an Excel spreadsheet. Typically used during the tractometry pipeline. + +Formerly: scil_convert_json_to_xlsx.py """ import argparse diff --git a/scripts/scil_json_harmonize_entries.py b/scripts/scil_json_harmonize_entries.py index ac52a17989..6f67c56375 100755 --- a/scripts/scil_json_harmonize_entries.py +++ b/scripts/scil_json_harmonize_entries.py @@ -11,6 +11,8 @@ which will cause a panda array to be incomplete, and thus crash. Finding out the union of all bundles/metrics/lesions will allow to create a complete json (but with NaN for missing values). + +Formerly: scil_harmonize_json.py """ import argparse diff --git a/scripts/scil_json_merge_entries.py b/scripts/scil_json_merge_entries.py index 336e1d9aec..dd362cf983 100755 --- a/scripts/scil_json_merge_entries.py +++ b/scripts/scil_json_merge_entries.py @@ -24,6 +24,8 @@ --average_last_layer option will average all entries (scalar) at the lowest layers, but instead of creating a list it creates a mean/std level. + +Formerly: scil_merge_json.py """ import argparse diff --git a/scripts/scil_tractogram_print_info.py b/scripts/scil_tractogram_print_info.py index 9586446d13..84cbcac74d 100755 --- a/scripts/scil_tractogram_print_info.py +++ b/scripts/scil_tractogram_print_info.py @@ -11,7 +11,7 @@ For trk files: also prints the data_per_point and data_per_streamline keys. See also: - - scil_print_header.py to see the header, affine, volume dimension. + - scil_header_print_info.py to see the header, affine, volume dimension. - scil_bundle_shape_measures.py to see bundle-specific information. 
""" From d8084bf7eedceb17c18fb62cf25544ed967afbed Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Wed, 20 Dec 2023 15:46:20 -0500 Subject: [PATCH 52/63] fix test and some pep8 --- scilpy/image/tests/test_labels.py | 8 +++++--- scilpy/image/tests/test_volume_math.py | 4 +++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/scilpy/image/tests/test_labels.py b/scilpy/image/tests/test_labels.py index f6a9dad66a..bb7d1129ab 100644 --- a/scilpy/image/tests/test_labels.py +++ b/scilpy/image/tests/test_labels.py @@ -82,8 +82,10 @@ def test_dilate_labels_with_mask(): in_mask = deepcopy(ref_in_labels) in_mask[in_mask > 0] = 1 out_labels = dilate_labels(in_labels, 1, 2, 1, - labels_to_dilate=[1, 6], labels_not_to_dilate=[3, 4], - labels_to_fill=[0, 2, 5], mask=in_mask) + labels_to_dilate=[1, 6], + labels_not_to_dilate=[3, 4], + labels_to_fill=[0, 2, 5], + mask=in_mask) exp_labels = deepcopy(ref_in_labels) exp_labels[exp_labels == 2] = 1 @@ -105,7 +107,7 @@ def test_dilate_labels_without_mask(): def test_get_data_as_labels_int(): data = np.zeros((2, 2, 2), dtype=np.int64) - img = nib.Nifti1Image(data, np.eye(4)) + img = nib.Nifti1Image(data, np.eye(4), dtype=np.int64) img.set_filename('test.nii.gz') _ = get_data_as_labels(img) diff --git a/scilpy/image/tests/test_volume_math.py b/scilpy/image/tests/test_volume_math.py index 961a9cbbe8..1acdf11962 100644 --- a/scilpy/image/tests/test_volume_math.py +++ b/scilpy/image/tests/test_volume_math.py @@ -212,7 +212,8 @@ def test_cut_up_cube_with_known_output(): [0, 0, 0]]]) # Asserting that the output shape matches the expected shape - assert result.shape == expected_shape, f"Expected shape {expected_shape}, got {result.shape}" + assert result.shape == expected_shape, \ + f"Expected shape {expected_shape}, got {result.shape}" # Asserting that the first block matches the expected first block assert_array_equal(result[0, 0, 0, :, :, :], expected_first_block) @@ -422,6 +423,7 @@ def test_subtraction(): img2 = nib.Nifti1Image(img_data_2, affine) expected_output = img_data_1 - img_data_2 output_data = subtraction([img1, img2], img1) + assert_array_almost_equal(output_data, expected_output) def test_multiplication(): From 0dafa947b26762f7aa1273f7e6226bbd828ade75 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 20 Dec 2023 16:08:31 -0500 Subject: [PATCH 53/63] 5th round of help + pep8 --- scripts/scil_bids_validate.py | 2 ++ scripts/scil_gradients_convert.py | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/scripts/scil_bids_validate.py b/scripts/scil_bids_validate.py index 8846052506..43c22ea51e 100755 --- a/scripts/scil_bids_validate.py +++ b/scripts/scil_bids_validate.py @@ -15,6 +15,8 @@ Mandatory entity: IntendedFor Sensitive entities: PhaseEncodingDirection, TotalReadoutTime, direction + +Formerly: scil_validate_bids.py """ import os diff --git a/scripts/scil_gradients_convert.py b/scripts/scil_gradients_convert.py index 92af320e0d..f3cfd0025b 100755 --- a/scripts/scil_gradients_convert.py +++ b/scripts/scil_gradients_convert.py @@ -4,7 +4,8 @@ """ Script to convert gradient tables between FSL and MRtrix formats. 
- +Formerly: scil_convert_gradients_mrtrix_to_fsl.py or +scil_convert_gradients_fsl_to_mrtrix.py """ import argparse From 14b92eb84ae464b2ad4e3315dd99d9d020b46376 Mon Sep 17 00:00:00 2001 From: gagnonanthony <79757265+gagnonanthony@users.noreply.github.com> Date: Wed, 20 Dec 2023 16:28:01 -0500 Subject: [PATCH 54/63] fix arnaud's comments --- scripts/scil_labels_dilate.py | 2 +- scripts/scil_visualize_seeds.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/scil_labels_dilate.py b/scripts/scil_labels_dilate.py index caf0dd7231..3d44b7c9e2 100755 --- a/scripts/scil_labels_dilate.py +++ b/scripts/scil_labels_dilate.py @@ -14,7 +14,7 @@ --label_to_fill 0 5001 5002 \\ --label_not_to_dilate 4 43 10 11 12 49 50 51 -Formerly: scil_labels_dilate.py +Formerly: scil_dilate_labels.py """ import argparse diff --git a/scripts/scil_visualize_seeds.py b/scripts/scil_visualize_seeds.py index 32fddfadf2..76daf1a7ed 100755 --- a/scripts/scil_visualize_seeds.py +++ b/scripts/scil_visualize_seeds.py @@ -6,8 +6,8 @@ When tractography was run, each streamline produced by the tracking algorithm saved its seeding point (its origin). -The tractogram must have been generated from scil_tracking_local or -scil_tracking_pft with the --save_seeds option. +The tractogram must have been generated from scil_tracking_local.py or +scil_tracking_pft.py with the --save_seeds option. """ import argparse From e9d02f1216b97ffaf2ed5833019330c5cba83394 Mon Sep 17 00:00:00 2001 From: Manonedde Date: Mon, 8 Jan 2024 10:56:23 -0500 Subject: [PATCH 55/63] fix basename and overwrite --- scripts/scil_dwi_compute_snr.py | 6 ++++-- scripts/scil_dwi_extract_b0.py | 3 ++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/scripts/scil_dwi_compute_snr.py b/scripts/scil_dwi_compute_snr.py index e5fbd33e53..46a6ff1767 100755 --- a/scripts/scil_dwi_compute_snr.py +++ b/scripts/scil_dwi_compute_snr.py @@ -31,6 +31,7 @@ import argparse import logging +import os from dipy.io.gradients import read_bvals_bvecs import matplotlib.pyplot as plt @@ -38,7 +39,7 @@ import numpy as np import pandas as pd -from scilpy.io.utils import (add_json_args, +from scilpy.io.utils import (add_json_args, add_overwrite_arg, add_verbose_arg, assert_inputs_exist) from scilpy.utils.filenames import split_name_with_nii @@ -83,6 +84,7 @@ def _build_arg_parser(): add_json_args(p) add_verbose_arg(p) + add_overwrite_arg(p) return p @@ -99,7 +101,7 @@ def main(): args.in_bvec, args.in_mask], [args.noise_mask, args.noise_map]) - basename, ext = split_name_with_nii(args.in_dwi) + basename, ext = split_name_with_nii(os.path.basename(args.in_dwi)) if args.out_basename: basename = args.out_basename diff --git a/scripts/scil_dwi_extract_b0.py b/scripts/scil_dwi_extract_b0.py index 9408ad8543..472a915211 100755 --- a/scripts/scil_dwi_extract_b0.py +++ b/scripts/scil_dwi_extract_b0.py @@ -21,7 +21,7 @@ from scilpy.dwi.utils import extract_b0 from scilpy.io.utils import (assert_inputs_exist, add_force_b0_arg, - add_verbose_arg) + add_verbose_arg, add_overwrite_arg) from scilpy.gradients.bvec_bval_tools import (check_b0_threshold, B0ExtractionStrategy) from scilpy.utils.filenames import split_name_with_nii @@ -69,6 +69,7 @@ def _build_arg_parser(): add_verbose_arg(p) add_force_b0_arg(p) + add_overwrite_arg(p) return p From 1cc2b66a1c23ac86fe44a9a56048a9a59b0fb6fd Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Mon, 8 Jan 2024 22:17:34 -0500 Subject: [PATCH 56/63] update amico and commit --- requirements.txt | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index e0b8294f8d..f8c6eca494 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,8 +7,8 @@ cycler==0.11.* Cython==0.29.*, !=0.29.29 dipy==1.8.* deepdiff==6.3.0 -dmri-amico==1.5.* -dmri-commit==1.6.* +dmri-amico==2.0.* +dmri-commit==2.0.* docopt==0.6.* formulaic==0.3.* fury==0.9.* From af4fe90331d2c509228fbd56833de1b6d94f8c50 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Tue, 9 Jan 2024 11:19:45 -0500 Subject: [PATCH 57/63] fix with new API --- scripts/scil_tractogram_commit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/scil_tractogram_commit.py b/scripts/scil_tractogram_commit.py index 6e6bde32ae..eea8f528a1 100755 --- a/scripts/scil_tractogram_commit.py +++ b/scripts/scil_tractogram_commit.py @@ -402,7 +402,7 @@ def main(): path_out=tmp_dir.name) # Preparation for fitting - commit.core.setup(ndirs=args.nbr_dir) + commit.core.setup() mit = commit.Evaluation('.', '.') # FIX for very small values during HCP processing From 3b7c5afda73273c90cb3c9aa56ce824a9d516914 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Thu, 11 Jan 2024 14:07:03 -0500 Subject: [PATCH 58/63] add pull request template --- docs/pull_request_template.md | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 docs/pull_request_template.md diff --git a/docs/pull_request_template.md b/docs/pull_request_template.md new file mode 100644 index 0000000000..b4629221e6 --- /dev/null +++ b/docs/pull_request_template.md @@ -0,0 +1,30 @@ +# Quick description + +Please include a summary of the changes and the related issue or the improvement. +Please also include relevant motivation and context. List any dependencies that are required for this change if needed. + +... + +## Type of change + +Please delete options that are not relevant. + +- [ ] Bug fix (non-breaking change which fixes an issue) +- [ ] New feature (non-breaking change which adds functionality) +- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) +- [ ] This change requires a documentation update + +## Provide data and command line to test + +... + +# Checklist: + +- [ ] My code follows the style guidelines of this project (mostly pep8 compliant) +- [ ] I have performed a self-review of my code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] I have made corresponding changes to the documentation +- [ ] My changes generate no new warnings +- [ ] I moved all functions from the script file (except the argparser and main) to scilpy modules +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing unit tests pass locally with my changes From 9393c73dcd27a871f90248d85749a1c9eaf7737e Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Thu, 11 Jan 2024 22:08:06 -0500 Subject: [PATCH 59/63] answer alex comments --- docs/pull_request_template.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/pull_request_template.md b/docs/pull_request_template.md index b4629221e6..d03047ab10 100644 --- a/docs/pull_request_template.md +++ b/docs/pull_request_template.md @@ -1,6 +1,6 @@ # Quick description -Please include a summary of the changes and the related issue or the improvement. +Please include a summary of the changes and the related issue(s) or improvement(s). Please also include relevant motivation and context. List any dependencies that are required for this change if needed. ... 
@@ -20,7 +20,8 @@ Please delete options that are not relevant. # Checklist: -- [ ] My code follows the style guidelines of this project (mostly pep8 compliant) +- [ ] My code follows the style guidelines of this project (run [autopep8](https://pypi.org/project/autopep8/)) +- [ ] I added relevant citations to scripts, modules and functions docstrings and descriptions - [ ] I have performed a self-review of my code - [ ] I have commented my code, particularly in hard-to-understand areas - [ ] I have made corresponding changes to the documentation From ef678b9707a71660629e2d67886a95889c1a48b7 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Thu, 11 Jan 2024 22:44:29 -0500 Subject: [PATCH 60/63] fix filtering --- scripts/scil_tractogram_filter_by_roi.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/scil_tractogram_filter_by_roi.py b/scripts/scil_tractogram_filter_by_roi.py index b2d8ccb274..b5782524ae 100755 --- a/scripts/scil_tractogram_filter_by_roi.py +++ b/scripts/scil_tractogram_filter_by_roi.py @@ -163,10 +163,10 @@ def prepare_filtering_list(parser, args): else: roi_opt_list.append(roi_opt.strip().split()) - if (len(roi_opt_list[-1]) < 4 or len(roi_opt_list) > 5) and roi_opt_list[-1][0] != 'atlas_roi': + if (len(roi_opt_list[-1]) < 4 or len(roi_opt_list[-1]) > 5) and roi_opt_list[-1][0] != 'atlas_roi': logging.error("Please specify 3 or 4 values " "for {} filtering.".format(roi_opt_list[-1][0])) - elif (len(roi_opt_list[-1]) < 5 or len(roi_opt_list) > 6) and roi_opt_list[-1][0] == 'atlas_roi': + elif (len(roi_opt_list[-1]) < 5 or len(roi_opt_list[-1]) > 6) and roi_opt_list[-1][0] == 'atlas_roi': logging.error("Please specify 4 or 5 values" " for {} filtering.".format(roi_opt_list[-1][0])) From 5f9b710c3a6b46b0546e0966ab7aa9498a60b8b1 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Fri, 12 Jan 2024 10:52:41 -0500 Subject: [PATCH 61/63] fix Antoine comments --- docs/pull_request_template.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/pull_request_template.md b/docs/pull_request_template.md index d03047ab10..f8fbfa0b93 100644 --- a/docs/pull_request_template.md +++ b/docs/pull_request_template.md @@ -7,18 +7,18 @@ Please also include relevant motivation and context. List any dependencies that ## Type of change -Please delete options that are not relevant. +Check the relevant options. - [ ] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) - [ ] This change requires a documentation update -## Provide data and command line to test +## Provide data, screenshots, command line to test (if relevant) ... 
-# Checklist: +# Checklist - [ ] My code follows the style guidelines of this project (run [autopep8](https://pypi.org/project/autopep8/)) - [ ] I added relevant citations to scripts, modules and functions docstrings and descriptions From 572d74ce7db919e5f675d94af4d00ea517dff344 Mon Sep 17 00:00:00 2001 From: arnaudbore Date: Fri, 12 Jan 2024 11:39:55 -0500 Subject: [PATCH 62/63] update version and fix gitignore when doing test --- .gitignore | 1 + README.md | 10 ---------- scilpy/version.py | 6 +++--- 3 files changed, 4 insertions(+), 13 deletions(-) diff --git a/.gitignore b/.gitignore index 2fa437b3e7..bf7527f334 100644 --- a/.gitignore +++ b/.gitignore @@ -51,6 +51,7 @@ htmlcov/ nosetests.xml coverage.xml *,cover +.test* # Translations *.mo diff --git a/README.md b/README.md index 7d19ef1300..f1e8fef57b 100644 --- a/README.md +++ b/README.md @@ -27,16 +27,6 @@ pip install --upgrade pip The library's structure is mostly aligned on that of [DIPY]. -⚠️ Breaking changes alert - scilpy 1.6.0 ⚠️ - -scilpy 1.6.0 is based on [hot_dipy](https://github.com/scilus/hot_dipy) a fork of dipy locked before release v1.8.0. -In order to install the library and scripts flawlessly (we hope), please follow these instructions: -``` -pip install packaging>=19.0 -pip install numpy==1.23.* -pip install Cython==0.29.* -``` - The library and scripts can be installed locally by using: ``` pip install -e . diff --git a/scilpy/version.py b/scilpy/version.py index 77e1078d49..ee020709d1 100644 --- a/scilpy/version.py +++ b/scilpy/version.py @@ -5,10 +5,10 @@ import os # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" -_version_major = 1 -_version_minor = 6 +_version_major = 2 +_version_minor = 0 _version_micro = 0 -_version_extra = '' +_version_extra = 'dev' # Construct full version string from these. _ver = [_version_major, _version_minor] From 139c3792c0de868b56750bb55b20cfab07fc4714 Mon Sep 17 00:00:00 2001 From: Arnaud Bore Date: Fri, 12 Jan 2024 12:17:27 -0500 Subject: [PATCH 63/63] Update version.py revert version to 1.7.0-dev to keep legacy scripts running --- scilpy/version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scilpy/version.py b/scilpy/version.py index ee020709d1..4927cb187b 100644 --- a/scilpy/version.py +++ b/scilpy/version.py @@ -5,8 +5,8 @@ import os # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" -_version_major = 2 -_version_minor = 0 +_version_major = 1 +_version_minor = 7 _version_micro = 0 _version_extra = 'dev'
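
Note on PATCH 48: the renamed helper get_opposite_pe_direction flips a BIDS
phase-encoding specifier so reversed acquisitions can be paired. A minimal
sketch of the core idea, assuming the usual BIDS convention where reversed
polarity is marked by a trailing '-' on PhaseEncodingDirection; the real
helper in scripts/scil_bids_validate.py also handles the 'direction' entity
form (e.g. AP/PA), which this sketch does not.

# Sketch only -- not the scilpy implementation.
def opposite_pe_direction(pe_dir):
    """Toggle the polarity suffix of a PhaseEncodingDirection value."""
    return pe_dir[:-1] if pe_dir.endswith('-') else pe_dir + '-'

assert opposite_pe_direction('j') == 'j-'
assert opposite_pe_direction('j-') == 'j'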
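
PATCH 49's guards in associate_dwis() lean on pybids Query sentinels to
separate plain DWIs from magnitude parts. A compact illustration of those
semantics, assuming pybids 0.16.* as pinned in PATCH 40's requirements;
'/data/bids_root' is a hypothetical dataset path, not one from the patches.

# Sketch only; requires a real BIDS dataset on disk to run.
from bids import BIDSLayout
from bids.layout import Query

layout = BIDSLayout('/data/bids_root')
# part=Query.NONE matches files WITHOUT a 'part' entity, so this union
# mirrors the script's "plain DWIs plus magnitude parts" selection.
dwis = (layout.get(suffix='dwi', extension='nii.gz', part=Query.NONE)
        + layout.get(suffix='dwi', extension='nii.gz', part='mag'))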
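
Finally, PATCHes 62 and 63 only touch the version fields; the context line
"_ver = [_version_major, _version_minor]" in the diff shows where they feed
the full version string. A minimal sketch of the conventional assembly that
yields the "1.7.0-dev" named in the PATCH 63 message; the exact join logic
lives in the unchanged remainder of scilpy/version.py, so treat this as an
approximation.

# Sketch of the conventional version-string assembly (assumption).
_version_major = 1
_version_minor = 7
_version_micro = 0
_version_extra = 'dev'

_ver = [_version_major, _version_minor, _version_micro]
__version__ = '.'.join(map(str, _ver))
if _version_extra:
    __version__ += '-' + _version_extra
print(__version__)  # -> 1.7.0-dev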