diff --git a/deep_folding/brainvisa/__init__.py b/deep_folding/brainvisa/__init__.py
index 463e689..4b1e7ce 100644
--- a/deep_folding/brainvisa/__init__.py
+++ b/deep_folding/brainvisa/__init__.py
@@ -15,3 +15,8 @@ def inner_function(*args, **kwargs):
             if exc.code != 0:
                 reraise(*exc_info())
     return inner_function
+
+
+class DeepFoldingError(BaseException):
+    pass
+
diff --git a/deep_folding/brainvisa/generate_ICBM2009c_transforms.py b/deep_folding/brainvisa/generate_ICBM2009c_transforms.py
index 721174d..b7f1765 100644
--- a/deep_folding/brainvisa/generate_ICBM2009c_transforms.py
+++ b/deep_folding/brainvisa/generate_ICBM2009c_transforms.py
@@ -50,10 +50,10 @@
 import glob
 import sys
 import re
-from os.path import abspath
+from os.path import abspath, basename, join, dirname
 from os.path import basename
 
-from deep_folding.brainvisa import exception_handler
+from deep_folding.brainvisa import exception_handler, DeepFoldingError
 from deep_folding.brainvisa.utils.folder import create_folder
 from deep_folding.brainvisa.utils.subjects import get_number_subjects,\
     is_it_a_subject
@@ -129,10 +129,11 @@ def parse_args(argv):
 
     args = parser.parse_args(argv)
 
+    suffix = {"R": "right", "L": "left", "F": "full"}
     setup_log(args,
               log_dir=f"{args.output_dir}",
               prog_name=basename(__file__),
-              suffix='right' if args.side == 'R' else 'left')
+              suffix=suffix[args.side])
 
     params = vars(args)
 
@@ -166,8 +167,12 @@ def __init__(self, src_dir, transform_dir,
     def generate_one_transform(self, subject: str):
         """Generates and writes ICBM2009c transform for one subject.
         """
-        graph_path = f"{self.src_dir}/{subject}*/" +\
-            f"{self.path_to_graph}/{self.side}*.arg"
+        if self.side == "F":
+            graph_path = f"{self.src_dir}/{subject}*/" + \
+                f"{self.path_to_graph}/?{subject}*.arg"
+        else:
+            graph_path = f"{self.src_dir}/{subject}*/" + \
+                f"{self.path_to_graph}/{self.side}{subject}*.arg"
         log.debug(graph_path)
         list_graph_file = glob.glob(graph_path)
         log.debug(f"list_graph_file = {list_graph_file}")
@@ -175,17 +180,36 @@ def generate_one_transform(self, subject: str):
             raise RuntimeError(f"No graph file! "
                                f"{graph_path} doesn't exist")
         for graph_file in list_graph_file:
-            transform_file = self.get_transform_filename(subject, graph_file)
-            graph = aims.read(graph_file)
-            g_to_icbm_template = aims.GraphManip.getICBM2009cTemplateTransform(
-                graph)
-            aims.write(g_to_icbm_template, transform_file)
-            if not self.bids:
-                break
+            try:
+                transform_file = self.get_transform_filename(subject, graph_file)
+                if self.side == "F":
+                    graph_file_left, graph_file_right, graph_to_remove = \
+                        self.get_left_and_right_graph_files(graph_file, list_graph_file)
+                    list_graph_file.remove(graph_to_remove)
+                    graph_left = aims.read(graph_file_left)
+                    graph_right = aims.read(graph_file_right)
+                    g_to_icbm_template_left = aims.GraphManip.getICBM2009cTemplateTransform(
+                        graph_left)
+                    g_to_icbm_template_right = aims.GraphManip.getICBM2009cTemplateTransform(
+                        graph_right)
+                    if g_to_icbm_template_left != g_to_icbm_template_right:
+                        raise DeepFoldingError(f"Left and right transformations are not the same: "
+                                               f"{g_to_icbm_template_left} and {g_to_icbm_template_right}")
+                    aims.write(g_to_icbm_template_left, transform_file)
+                else:
+                    graph = aims.read(graph_file)
+                    g_to_icbm_template = aims.GraphManip.getICBM2009cTemplateTransform(
+                        graph)
+                    aims.write(g_to_icbm_template, transform_file)
+                if not self.bids:
+                    break
+            except DeepFoldingError as e:
+                log.error(f"Graph file {graph_file} : {e}")
+                continue
 
     def get_transform_filename(self, subject, graph_file):
-        transform_file = (
-            f"{self.transform_dir}/"
+        transform_file = join(
+            f"{self.transform_dir}",
             f"{self.side}transform_to_ICBM2009c_{subject}")
         if self.bids:
             session = re.search("ses-([^_/]+)", graph_file)
@@ -200,6 +224,23 @@ def get_transform_filename(self, subject, graph_file):
             transform_file += ".trm"
         return transform_file
 
+    @staticmethod
+    def get_left_and_right_graph_files(graph_file, list_graph_file):
+        graph_name = basename(graph_file)
+        if graph_name.startswith("L"):
+            graph_file_left = graph_file
+            graph_file_right = join(dirname(graph_file), f"R{graph_name[1:]}")
+            if graph_file_right not in list_graph_file:
+                raise DeepFoldingError(f"Right graph is missing : {graph_file_right}")
+            graph_to_remove = graph_file_right
+        else:
+            graph_file_right = graph_file
+            graph_file_left = join(dirname(graph_file), f"L{graph_name[1:]}")
+            if graph_file_left not in list_graph_file:
+                raise DeepFoldingError(f"Left graph is missing : {graph_file_left}")
+            graph_to_remove = graph_file_left
+        return graph_file_left, graph_file_right, graph_to_remove
+
     def compute(self, number_subjects):
         """Loops over subjects to generate transforms to ICBM2009c from graphs.
         """
""" diff --git a/deep_folding/brainvisa/generate_foldlabels.py b/deep_folding/brainvisa/generate_foldlabels.py index 8a820b2..3227807 100644 --- a/deep_folding/brainvisa/generate_foldlabels.py +++ b/deep_folding/brainvisa/generate_foldlabels.py @@ -38,7 +38,7 @@ Typical usage ------------- You can use this program by first entering in the brainvisa environment - (here brainvisa 5.0.0 installed with singurity) and launching the script + (here brainvisa 5.0.0 installed with singularity) and launching the script from the terminal: >>> bv bash >>> python generate_foldlabels.py @@ -50,11 +50,9 @@ import glob import re import sys -from os.path import abspath -from os.path import exists -from os.path import basename +from os.path import abspath, exists, basename, dirname, join -from deep_folding.brainvisa import exception_handler +from deep_folding.brainvisa import exception_handler, DeepFoldingError from deep_folding.brainvisa.utils.folder import create_folder from deep_folding.brainvisa.utils.subjects import get_number_subjects,\ is_it_a_subject @@ -63,7 +61,7 @@ from deep_folding.brainvisa.utils.logs import setup_log from deep_folding.brainvisa.utils.parallel import define_njobs from deep_folding.brainvisa.utils.foldlabel import \ - generate_foldlabel_from_graph_file + generate_foldlabel_from_graph_file, generate_full_foldlabel from deep_folding.brainvisa.utils.quality_checks import \ compare_number_aims_files_with_expected, \ compare_number_aims_files_with_number_in_source, \ @@ -144,10 +142,11 @@ def parse_args(argv): args = parser.parse_args(argv) + suffix = {"R": "right", "L": "left", "F": "full"} setup_log(args, log_dir=f"{args.output_dir}", prog_name=basename(__file__), - suffix='right' if args.side == 'R' else 'left') + suffix=suffix[args.side]) params = {} @@ -157,6 +156,8 @@ def parse_args(argv): params['side'] = args.side params['junction'] = args.junction params['parallel'] = args.parallel + params['bids'] = args.bids + params['parallel'] = args.parallel # Checks if nb_subjects is either the string "all" or a positive integer params['nb_subjects'] = get_number_subjects(args.nb_subjects) @@ -205,20 +206,54 @@ def generate_one_foldlabel(self, subject: str): """Generates and writes skeleton for one subject. """ # Gets graph file path - graph_path = f"{self.src_dir}/{subject}*/" +\ - f"{self.path_to_graph}/{self.side}*.arg" + if self.side == "F": + graph_path = f"{self.src_dir}/{subject}*/" + \ + f"{self.path_to_graph}/?{subject}*.arg" + else: + graph_path = f"{self.src_dir}/{subject}*/" +\ + f"{self.path_to_graph}/{self.side}*.arg" list_graph_file = glob.glob(graph_path) log.debug(f"list_graph_file = {list_graph_file}") if len(list_graph_file) == 0: - raise RuntimeError(f"No graph file! " - f"{graph_path} doesn't exist") - + raise FileNotFoundError(f"No graph file! 
" + f"{graph_path} doesn't exist") for graph_file in list_graph_file: - foldlabel_file = self.get_foldlabel_filename(subject, graph_file) - generate_foldlabel_from_graph_file( - graph_file, foldlabel_file, self.junction) - if not self.bids: - break + try: + foldlabel_file = self.get_foldlabel_filename(subject, graph_file) + if self.side == "F": + graph_file_left, graph_file_right, graph_to_remove = \ + self.get_left_and_right_graph_files(graph_file, list_graph_file) + if graph_to_remove: + list_graph_file.remove(graph_to_remove) + generate_full_foldlabel(graph_file_left, graph_file_right, + foldlabel_file, self.junction) + else: + generate_foldlabel_from_graph_file( + graph_file, foldlabel_file, self.junction) + if not self.bids: + break + except DeepFoldingError as e: + log.error(f"Graph file {graph_file} : {e}") + continue + + @staticmethod + def get_left_and_right_graph_files(graph_file, list_graph_file): + graph_name = basename(graph_file) + if graph_name[0] == "L": + graph_file_left = graph_file + graph_file_right = join(dirname(graph_file), f"R{graph_name[1:]}") + if graph_file_right not in list_graph_file: + raise DeepFoldingError(f"Right graph is missing ({graph_file_right})") + else: + graph_to_remove = graph_file_right + else: + graph_file_right = graph_file + graph_file_left = join(dirname(graph_file), f"L{graph_name[1:]}") + if graph_file_left not in list_graph_file: + raise DeepFoldingError(f"Left graph is missing ({graph_file_left})") + else: + graph_to_remove = graph_file_left + return graph_file_left, graph_file_right, graph_to_remove def compute(self, number_subjects): """Loops over subjects and converts graphs into skeletons. diff --git a/deep_folding/brainvisa/generate_skeletons.py b/deep_folding/brainvisa/generate_skeletons.py index 7000704..ba7ecf9 100644 --- a/deep_folding/brainvisa/generate_skeletons.py +++ b/deep_folding/brainvisa/generate_skeletons.py @@ -38,7 +38,7 @@ Typical usage ------------- You can use this program by first entering in the brainvisa environment - (here brainvisa 5.0.0 installed with singurity) and launching the script + (here brainvisa 5.0.0 installed with singularity) and launching the script from the terminal: >>> bv bash >>> python generate_skeletons.py @@ -50,10 +50,9 @@ import glob import re import sys -from os.path import abspath -from os.path import basename +from os.path import abspath, basename, join, dirname -from deep_folding.brainvisa import exception_handler +from deep_folding.brainvisa import exception_handler, DeepFoldingError from deep_folding.brainvisa.utils.folder import create_folder from deep_folding.brainvisa.utils.subjects import \ get_number_subjects, is_it_a_subject @@ -62,7 +61,7 @@ from deep_folding.brainvisa.utils.logs import setup_log from deep_folding.brainvisa.utils.parallel import define_njobs from deep_folding.brainvisa.utils.skeleton import \ - generate_skeleton_from_graph_file + generate_skeleton_from_graph_file, generate_full_skeleton from deep_folding.brainvisa.utils.quality_checks import \ compare_number_aims_files_with_expected, \ get_not_processed_subjects @@ -141,10 +140,11 @@ def parse_args(argv): args = parser.parse_args(argv) + suffix = {"R": "right", "L": "left", "F": "full"} setup_log(args, log_dir=f"{args.output_dir}", prog_name=basename(__file__), - suffix='right' if args.side == 'R' else 'left') + suffix=suffix[args.side]) params = vars(args) @@ -196,20 +196,60 @@ def get_skeleton_filename(self, subject, graph_file): def generate_one_skeleton(self, subject: str): """Generates and writes skeleton 
diff --git a/deep_folding/brainvisa/generate_skeletons.py b/deep_folding/brainvisa/generate_skeletons.py
index 7000704..ba7ecf9 100644
--- a/deep_folding/brainvisa/generate_skeletons.py
+++ b/deep_folding/brainvisa/generate_skeletons.py
@@ -38,7 +38,7 @@
 Typical usage
 -------------
 You can use this program by first entering in the brainvisa environment
-    (here brainvisa 5.0.0 installed with singurity) and launching the script
+    (here brainvisa 5.0.0 installed with singularity) and launching the script
     from the terminal:
 >>> bv bash
 >>> python generate_skeletons.py
@@ -50,10 +50,9 @@
 import glob
 import re
 import sys
-from os.path import abspath
-from os.path import basename
+from os.path import abspath, basename, join, dirname
 
-from deep_folding.brainvisa import exception_handler
+from deep_folding.brainvisa import exception_handler, DeepFoldingError
 from deep_folding.brainvisa.utils.folder import create_folder
 from deep_folding.brainvisa.utils.subjects import \
     get_number_subjects, is_it_a_subject
@@ -62,7 +61,7 @@
 from deep_folding.brainvisa.utils.logs import setup_log
 from deep_folding.brainvisa.utils.parallel import define_njobs
 from deep_folding.brainvisa.utils.skeleton import \
-    generate_skeleton_from_graph_file
+    generate_skeleton_from_graph_file, generate_full_skeleton
 from deep_folding.brainvisa.utils.quality_checks import \
     compare_number_aims_files_with_expected, \
     get_not_processed_subjects
@@ -141,10 +140,11 @@ def parse_args(argv):
 
     args = parser.parse_args(argv)
 
+    suffix = {"R": "right", "L": "left", "F": "full"}
     setup_log(args,
               log_dir=f"{args.output_dir}",
               prog_name=basename(__file__),
-              suffix='right' if args.side == 'R' else 'left')
+              suffix=suffix[args.side])
 
     params = vars(args)
 
@@ -196,20 +196,60 @@ def get_skeleton_filename(self, subject, graph_file):
     def generate_one_skeleton(self, subject: str):
         """Generates and writes skeleton for one subject.
         """
-        graph_path = f"{self.src_dir}/{subject}*/" +\
-            f"{self.path_to_graph}/{self.side}*.arg"
+        if self.side == "F":
+            graph_path = f"{self.src_dir}/{subject}*/" + \
+                f"{self.path_to_graph}/?{subject}*.arg"
+        else:
+            graph_path = f"{self.src_dir}/{subject}*/" + \
+                f"{self.path_to_graph}/{self.side}{subject}*.arg"
         list_graph_file = glob.glob(graph_path)
         log.debug(f"list_graph_file = {list_graph_file}")
-        if len(list_graph_file) == 0:
-            raise RuntimeError(f"No graph file! "
-                               f"{graph_path} does not exist")
+        try:
+            if len(list_graph_file) == 0:
+                raise FileNotFoundError(f"No graph file! "
+                                        f"{graph_path} doesn't exist")
+        except FileNotFoundError as e:
+            log.error(f"Subject {subject} : {e}")
         for graph_file in list_graph_file:
-            skeleton_file = self.get_skeleton_filename(subject, graph_file)
-            generate_skeleton_from_graph_file(graph_file,
-                                              skeleton_file,
-                                              self.junction)
-            if not self.bids:
-                break
+            try:
+                skeleton_file = self.get_skeleton_filename(subject, graph_file)
+                if self.side == "F":
+                    graph_file_left, graph_file_right, graph_to_remove = \
+                        self.get_left_and_right_graph_files(graph_file, list_graph_file)
+                    if graph_to_remove:
+                        list_graph_file.remove(graph_to_remove)
+                    generate_full_skeleton(graph_file_left,
+                                           graph_file_right,
+                                           skeleton_file,
+                                           self.junction)
+                else:
+                    generate_skeleton_from_graph_file(graph_file,
+                                                      skeleton_file,
+                                                      self.junction)
+                if not self.bids:
+                    break
+            except DeepFoldingError as e:
+                log.error(f"Graph file {graph_file} : {e}")
+                continue
+
+    @staticmethod
+    def get_left_and_right_graph_files(graph_file, list_graph_file):
+        graph_name = basename(graph_file)
+        if graph_name[0] == "L":
+            graph_file_left = graph_file
+            graph_file_right = join(dirname(graph_file), f"R{graph_name[1:]}")
+            if graph_file_right not in list_graph_file:
+                raise DeepFoldingError(f"Right graph is missing ({graph_file_right})")
+            else:
+                graph_to_remove = graph_file_right
+        else:
+            graph_file_right = graph_file
+            graph_file_left = join(dirname(graph_file), f"L{graph_name[1:]}")
+            if graph_file_left not in list_graph_file:
+                raise DeepFoldingError(f"Left graph is missing ({graph_file_left})")
+            else:
+                graph_to_remove = graph_file_left
+        return graph_file_left, graph_file_right, graph_to_remove
 
     def compute(self, number_subjects):
         """Loops over subjects and converts graphs into skeletons.
@@ -226,8 +266,8 @@ def compute(self, number_subjects):
         list_subjects = select_subjects_int(list_subjects, number_subjects)
 
         log.info(f"Expected number of subjects = {len(list_subjects)}")
-        log.info(f"list_subjects[:5] = {list_subjects[:5]}")
-        log.debug(f"list_subjects = {list_subjects}")
+        log.info(f"list_subjects[:5] = {list_subjects[:5]}")
+        log.debug(f"list_subjects = {list_subjects}")
 
         # Performs computation on all subjects either serially or in parallel
         if self.parallel:
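Note for readers of the two generator scripts above: per-graph problems are raised as DeepFoldingError, logged, and skipped so that one bad graph does not abort the whole subject list. A stripped-down, hypothetical sketch of that control flow (the DeepFoldingError class below is a stand-in mirroring the one added in __init__.py):

import logging

log = logging.getLogger(__name__)


class DeepFoldingError(BaseException):
    """Stand-in for deep_folding.brainvisa.DeepFoldingError."""


def process_graphs(list_graph_file, process_one):
    """Process each graph file, skipping those that raise DeepFoldingError."""
    for graph_file in list_graph_file:
        try:
            process_one(graph_file)
        except DeepFoldingError as e:
            log.error(f"Graph file {graph_file} : {e}")
            continue  # keep processing the remaining graphs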
diff --git a/deep_folding/brainvisa/resample_files.py b/deep_folding/brainvisa/resample_files.py
index 38dc108..329dcd5 100644
--- a/deep_folding/brainvisa/resample_files.py
+++ b/deep_folding/brainvisa/resample_files.py
@@ -40,7 +40,7 @@
 Typical usage
 -------------
 You can use this program by first entering in the brainvisa environment
-    (here brainvisa 5.0.0 installed with singurity) and launching the script
+    (here brainvisa 5.0.0 installed with singularity) and launching the script
     from the terminal:
 >>> bv bash
 >>> python resample_files.py
@@ -84,19 +84,17 @@
 
 # Import constants
 from deep_folding.brainvisa.utils.constants import \
-    _ALL_SUBJECTS, _INPUT_TYPE_DEFAULT, _SKELETON_DIR_DEFAULT,\
-    _TRANSFORM_DIR_DEFAULT, _RESAMPLED_SKELETON_DIR_DEFAULT,\
+    _ALL_SUBJECTS, _INPUT_TYPE_DEFAULT, _SKELETON_DIR_DEFAULT, \
+    _TRANSFORM_DIR_DEFAULT, _RESAMPLED_SKELETON_DIR_DEFAULT, \
     _RESAMPLED_FOLDLABEL_DIR_DEFAULT, \
     _SIDE_DEFAULT, _VOXEL_SIZE_DEFAULT
 
-_SKELETON_FILENAME = "skeleton_generated_"
-_FOLDLABEL_FILENAME = "foldlabel_"
+_SKELETON_FILENAME = "skeleton_generated"
+_FOLDLABEL_FILENAME = "foldlabel"
 _DISTMAP_FILENAME = "distmap_generated_"
-_RESAMPLED_SKELETON_FILENAME = "resampled_skeleton_"
-_RESAMPLED_FOLDLABEL_FILENAME = "resampled_foldlabel_"
+_RESAMPLED_SKELETON_FILENAME = "resampled_skeleton"
+_RESAMPLED_FOLDLABEL_FILENAME = "resampled_foldlabel"
 _RESAMPLED_DISTMAP_FILENAME = "resampled_distmap_"
-
-
 
 # Defines logger
 log = set_file_logger(__file__)
@@ -126,7 +124,7 @@ def resample_one_skeleton(input_image,
     # with respect to the natural order
     # We don't give background, which is the interior 0
     values = np.array([11, 60, 30, 35, 10, 20, 40,
-                      50, 70, 80, 90, 100, 110, 120])
+                       50, 70, 80, 90, 100, 110, 120])
 
     # Normalization and resampling of skeleton images
     resampled = resample(input_image=input_image,
@@ -211,11 +209,11 @@ def resample_one_distmap(input_image,
 
     # Normalization and resampling of skeleton images
     cmd_normalize = 'AimsApplyTransform' + \
-        ' -i ' + input_image + \
-        ' -o ' + resampled_dir + \
-        ' -m ' + transfo_file + \
-        ' -r ' + ref_file + \
-        ' -t linear'
+                    ' -i ' + input_image + \
+                    ' -o ' + resampled_dir + \
+                    ' -m ' + transfo_file + \
+                    ' -r ' + ref_file + \
+                    ' -t linear'
 
     print(cmd_normalize)
     os.system(cmd_normalize)
@@ -398,10 +396,8 @@ def __init__(self, src_dir, resampled_dir, transform_dir,
             side: either 'L' or 'R', hemisphere side
             out_voxel_size: float giving voxel size in mm
             parallel: does parallel computation if True
-            src_filename : name of skeleton files
-                           (format : ".nii.gz")
-            output_filename : name of generated files
-                           (format : ".nii.gz")
+            src_filename : name of skeleton files (format : "_.nii.gz")
+            output_filename : name of generated files (format : "_.nii.gz")
         """
         super(SkeletonResampler, self).__init__(
             src_dir=src_dir, resampled_dir=resampled_dir,
@@ -410,21 +406,18 @@ def __init__(self, src_dir, resampled_dir, transform_dir,
 
         # Names of files in function of dictionary: keys = 'subject' and 'side'
         # Src directory contains either 'R' or 'L' a subdirectory
-        # self.src_file = join(
-        #     self.src_dir,
-        #     '%(side)sskeleton_generated_%(subject)s.nii.gz')
         self.src_file = join(self.src_dir,
-                             f'%(side)s' + src_filename + '%(subject)s.nii.gz')
+                             f"%(side)s{src_filename}_%(subject)s.nii.gz")
 
         # Names of files in function of dictionary: keys -> 'subject' and
         # 'side'
         self.src_filename = src_filename
         self.resampled_file = join(
             self.resampled_dir,
-            f'%(side)s' + output_filename + '%(subject)s.nii.gz')
+            f"%(side)s{output_filename}_%(subject)s.nii.gz")
 
         # subjects are detected as the nifti file names under src_dir
-        self.expr = '^.' + src_filename + '(.*).nii.gz$'
+        self.expr = f"^.{src_filename}_(.*).nii.gz$"
 
     @staticmethod
     def resample_one_subject(src_file: str,
@@ -458,10 +451,8 @@ def __init__(self, src_dir, resampled_dir, transform_dir,
             side: either 'L' or 'R', hemisphere side
             out_voxel_size: float giving voxel size in mm
             parallel: does parallel computation if True
-            src_filename : name of fold label files
-                           (format : ".nii.gz")
-            output_filename : name of generated files
-                           (format : ".nii.gz")
+            src_filename : name of fold label files (format : "_.nii.gz")
+            output_filename : name of generated files (format : "_.nii.gz")
         """
         super(FoldLabelResampler, self).__init__(
             src_dir=src_dir, resampled_dir=resampled_dir,
@@ -472,17 +463,17 @@ def __init__(self, src_dir, resampled_dir, transform_dir,
         # Src directory contains either 'R' or 'L' a subdirectory
         self.src_file = join(
             self.src_dir,
-            '%(side)s' + src_filename + '%(subject)s.nii.gz')
+            f"%(side)s{src_filename}_%(subject)s.nii.gz")
 
         # Names of files in function of dictionary: keys -> 'subject' and
         # 'side'
         self.src_filename = src_filename
         self.resampled_file = join(
             self.resampled_dir,
-            f'%(side)s' + output_filename + '%(subject)s.nii.gz')
+            f"%(side)s{output_filename}_%(subject)s.nii.gz")
 
         # subjects are detected as the nifti file names under src_dir
-        self.expr = '^.' + src_filename + '(.*).nii.gz$'
+        self.expr = f"^.{src_filename}_(.*).nii.gz$"
 
     @staticmethod
     def resample_one_subject(src_file: str,
@@ -573,8 +564,8 @@ def parse_args(argv):
         type=str,
         default=_RESAMPLED_SKELETON_DIR_DEFAULT,
         help='Target directory where to store the resampled files. '
-        'Default is : ' +
-        _RESAMPLED_SKELETON_DIR_DEFAULT)
+             'Default is : ' +
+             _RESAMPLED_SKELETON_DIR_DEFAULT)
     parser.add_argument(
         "-t", "--transform_dir", type=str, default=_TRANSFORM_DIR_DEFAULT,
         help='Transform directory containing transform files to ICBM2009c. '
@@ -594,34 +585,30 @@ def parse_args(argv):
         help='Voxel size of bounding box. '
              'Default is : None')
     parser.add_argument(
-        "-f", "--src_filename", type=str, default=_SKELETON_FILENAME,
+        "-f", "--src_filename", type=str, default=None,
         help='Filename of sources files. '
-             'Format is : ".nii.gz" '
+             'Format is : "_.nii.gz" '
             'Default is : ' + _SKELETON_FILENAME)
     parser.add_argument(
-        "-e",
-        "--output_filename",
-        type=str,
-        default=_RESAMPLED_SKELETON_FILENAME,
+        "-e", "--output_filename", type=str, default=None,
         help='Filename of output files. '
-             'Format is : ".nii.gz" '
-             'Default is : ' +
-             _RESAMPLED_SKELETON_FILENAME)
+             'Format is : "_.nii.gz" '
+             'Default is : ' + _RESAMPLED_SKELETON_FILENAME)
     parser.add_argument(
         '-v', '--verbose', action='count', default=0,
         help='Verbose mode: '
-        'If no option is provided then logging.INFO is selected. '
-        'If one option -v (or -vv) or more is provided '
-        'then logging.DEBUG is selected.')
+             'If no option is provided then logging.INFO is selected. '
+             'If one option -v (or -vv) or more is provided '
+             'then logging.DEBUG is selected.')
 
     params = {}
 
     args = parser.parse_args(argv)
-
+    dico_suffix = {"R": "right", "L": "left", "F": "full"}
     setup_log(args,
               log_dir=f"{args.output_dir}",
               prog_name=basename(__file__),
-              suffix='right' if args.side == 'R' else 'left')
+              suffix=dico_suffix[args.side])
 
     params['src_dir'] = args.src_dir
     params['input_type'] = args.input_type
@@ -648,7 +635,6 @@ def resample_files(
         number_subjects=_ALL_SUBJECTS,
         src_filename=_SKELETON_FILENAME,
         output_filename=_RESAMPLED_SKELETON_FILENAME):
-
     if input_type == "skeleton":
         src_filename = (_SKELETON_FILENAME
                         if src_filename is None
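With the trailing underscore removed from the filename constants, the underscore is now written explicitly both in the filename templates and in the subject-detection regex. A hedged sketch of how the two fit together (directory layout and subject name are hypothetical):

import re

src_filename = "skeleton_generated"        # new default, no trailing underscore
src_file = f"%(side)s{src_filename}_%(subject)s.nii.gz"
expr = f"^.{src_filename}_(.*).nii.gz$"

name = src_file % {"side": "R", "subject": "sub-01"}
print(name)                    # Rskeleton_generated_sub-01.nii.gz
match = re.search(expr, name)
print(match.group(1))          # sub-01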
log.warning(f"The attribute {e} is not in the graphs ({graph_file_right} or {graph_file_left})") + # Get the dimensions for the new volume + boundingbox_max_left = np.asarray(graph_left["boundingbox_max"]) + boundingbox_max_right = np.asarray(graph_right["boundingbox_max"]) + boundingbox_max = np.maximum(boundingbox_max_left, boundingbox_max_right) + log.debug(f"Boundingbox max : {boundingbox_max}") + + # Create empty volumes with the new dimensions + dimensions = (boundingbox_max[0] + 1, + boundingbox_max[1] + 1, + boundingbox_max[2] + 1, + 1) + empty_vol_left = create_empty_volume_from_graph(graph_left, dimensions=dimensions) + empty_vol_right = create_empty_volume_from_graph(graph_right, dimensions=dimensions) + vol_label = create_empty_volume_from_graph(graph_right, dimensions=dimensions) + + # Generate the foldlabel according to the junction + if junction == "wide": + vol_label_left = generate_foldlabel_wide_junction(graph_left, empty_vol_left) + vol_label_right = generate_foldlabel_wide_junction(graph_right, empty_vol_right) + else: + vol_label_left = generate_foldlabel_thin_junction(graph_left, empty_vol_left) + vol_label_right = generate_foldlabel_thin_junction(graph_right, empty_vol_right) + arr_label_left = np.asarray(vol_label_left) + arr_label_right = np.asarray(vol_label_right) + arr_label = np.asarray(vol_label) + + # Add the left and right foldlabels + # To differenciate right and left right foldlabels, values from the right foldlabeld are increased by 10000 + # For contentious voxels (voxels which have two different values in the two foldlabels), + # the value is the one of the left foldlabel + arr_label[arr_label_right > 0] = 10000 + arr_label_right[arr_label_right > 0] + arr_label[arr_label_left > 0] = arr_label_left[arr_label_left > 0] + arr_label = arr_label.astype(int) + + # Sanity checks + # FIXME : select good threshold + threshold = 200 + nb_contentious_voxels = np.count_nonzero(np.logical_and(arr_label_left, arr_label_right)) + log.debug(f"Number of conflict voxels between left and right skeletons : {nb_contentious_voxels}") + if nb_contentious_voxels > threshold: + log.warning(f"Left and right graph files have {nb_contentious_voxels} voxels with different values ! 
" + f"Graph files : {graph_file_left} and {graph_file_left}") + if np.max(arr_label % 1000) == 999: + raise DeepFoldingError(f"Graph files have too much simple surface to be uniquely identified (max = 1000)" + f"Graph files : {graph_file_left} and {graph_file_left}") + aims.write(vol_label, foldlabel_file) \ No newline at end of file diff --git a/deep_folding/brainvisa/utils/graph.py b/deep_folding/brainvisa/utils/graph.py index 136dbb3..c519cde 100644 --- a/deep_folding/brainvisa/utils/graph.py +++ b/deep_folding/brainvisa/utils/graph.py @@ -42,13 +42,14 @@ log = set_file_logger(__file__) -def create_empty_volume_from_graph(graph: aims.Graph) -> aims.Volume: +def create_empty_volume_from_graph(graph: aims.Graph, dimensions: list = None) -> aims.Volume: """Creates empty volume with graph header""" voxel_size = graph['voxel_size'][:3] - # Adds 1 for each x,y,z dimension - dimensions = [i + j for i, - j in zip(graph['boundingbox_max'], [1, 1, 1, 0])] + if dimensions is None: + # Adds 1 for each x,y,z dimension + dimensions = [i + j for i, + j in zip(graph['boundingbox_max'], [1, 1, 1, 0])] vol = aims.Volume(dimensions, dtype='S16') vol.header()['voxel_size'] = voxel_size diff --git a/deep_folding/brainvisa/utils/skeleton.py b/deep_folding/brainvisa/utils/skeleton.py index 8320ec6..d87674b 100644 --- a/deep_folding/brainvisa/utils/skeleton.py +++ b/deep_folding/brainvisa/utils/skeleton.py @@ -38,6 +38,7 @@ import numpy as np from soma import aims +from deep_folding.brainvisa import DeepFoldingError from deep_folding.brainvisa.utils.graph import create_empty_volume_from_graph from deep_folding.config.logs import set_file_logger @@ -55,15 +56,14 @@ def is_skeleton(arr): def generate_skeleton_thin_junction( - graph: aims.Graph) -> aims.Volume: + graph: aims.Graph, volume: aims.Volume) -> aims.Volume: """Converts an aims graph into skeleton volumes - It should produce thin junctions as vertices (aims_ss, aims_bottom) are written after edges (junction, plidepassage). Thus, when voxels are present in both, simple and bottom surfaces override junctions """ - vol_skel = create_empty_volume_from_graph(graph) + vol_skel = volume arr_skel = np.asarray(vol_skel) for edge in graph.edges(): @@ -111,15 +111,14 @@ def generate_skeleton_thin_junction( def generate_skeleton_wide_junction( - graph: aims.Graph) -> aims.Volume: + graph: aims.Graph, volume: aims.Volume) -> aims.Volume: """Converts an aims graph into skeleton volumes - It should produce wide junctions as edges (junction, plidepassage) are written after vertices (aims_ss, aims_bottom). 
diff --git a/deep_folding/brainvisa/utils/skeleton.py b/deep_folding/brainvisa/utils/skeleton.py
index 8320ec6..d87674b 100644
--- a/deep_folding/brainvisa/utils/skeleton.py
+++ b/deep_folding/brainvisa/utils/skeleton.py
@@ -38,6 +38,7 @@
 import numpy as np
 from soma import aims
 
+from deep_folding.brainvisa import DeepFoldingError
 from deep_folding.brainvisa.utils.graph import create_empty_volume_from_graph
 from deep_folding.config.logs import set_file_logger
 
@@ -55,15 +56,14 @@ def is_skeleton(arr):
 
 
 def generate_skeleton_thin_junction(
-        graph: aims.Graph) -> aims.Volume:
+        graph: aims.Graph, volume: aims.Volume) -> aims.Volume:
     """Converts an aims graph into skeleton volumes
-
     It should produce thin junctions as vertices (aims_ss, aims_bottom)
     are written after edges (junction, plidepassage). Thus, when voxels
     are present in both, simple and bottom surfaces override junctions
     """
 
-    vol_skel = create_empty_volume_from_graph(graph)
+    vol_skel = volume
     arr_skel = np.asarray(vol_skel)
 
     for edge in graph.edges():
@@ -111,15 +111,14 @@ def generate_skeleton_thin_junction(
 
 
 def generate_skeleton_wide_junction(
-        graph: aims.Graph) -> aims.Volume:
+        graph: aims.Graph, volume: aims.Volume) -> aims.Volume:
     """Converts an aims graph into skeleton volumes
-
     It should produce wide junctions as edges (junction, plidepassage)
     are written after vertices (aims_ss, aims_bottom). Thus, when voxels
     are present in both, junction voxels override
     simple surface and bottom voxels
     """
-    vol_skel = create_empty_volume_from_graph(graph)
+    vol_skel = volume
     arr_skel = np.asarray(vol_skel)
 
     cnt_duplicate = 0
@@ -166,10 +165,11 @@ def generate_skeleton_from_graph(
         graph: aims.Graph,
         junction: str = _JUNCTION_DEFAULT) -> aims.Volume:
     """Generates skeleton from graph"""
+    volume = create_empty_volume_from_graph(graph)
     if junction == 'wide':
-        vol_skel = generate_skeleton_wide_junction(graph)
+        vol_skel = generate_skeleton_wide_junction(graph, volume)
     else:
-        vol_skel = generate_skeleton_thin_junction(graph)
+        vol_skel = generate_skeleton_thin_junction(graph, volume)
 
     return vol_skel
 
@@ -182,3 +182,73 @@ def generate_skeleton_from_graph_file(graph_file: str,
     if not is_skeleton(np.asarray(vol_skeleton)):
         raise ValueError(f"{skeleton_file} has unexpected skeleton values")
     aims.write(vol_skeleton, skeleton_file)
+
+
+def generate_full_skeleton(graph_file_left: str,
+                           graph_file_right: str,
+                           skeleton_file: str,
+                           junction: str = _JUNCTION_DEFAULT):
+    """Generates full skeleton from right and left graph files"""
+    graph_left = aims.read(graph_file_left)
+    graph_right = aims.read(graph_file_right)
+
+    # Sanity check
+    # TODO: find the good keys to check
+    keys_to_check = ['voxel_size', 'transformations', 'referentials', 'referential']
+    for k in keys_to_check:
+        try:
+            if graph_left[k] != graph_right[k]:
+                raise DeepFoldingError(f"The attribute {k} is not the same in the right graph ({graph_right[k]}) "
+                                       f"and in the left graph ({graph_left[k]})")
+        except KeyError as e:
+            log.warning(f"The attribute {e} is not in the graphs ({graph_file_right} or {graph_file_left})")
+    # Get the dimensions for the new volume
+    boundingbox_max_left = np.asarray(graph_left["boundingbox_max"])
+    boundingbox_max_right = np.asarray(graph_right["boundingbox_max"])
+    boundingbox_max = np.maximum(boundingbox_max_left, boundingbox_max_right)
+    log.debug(f"Boundingbox max : {boundingbox_max}")
+
+    # Create empty volumes with the new dimensions
+    dimensions = (boundingbox_max[0] + 1,
+                  boundingbox_max[1] + 1,
+                  boundingbox_max[2] + 1,
+                  1)
+    empty_vol_left = create_empty_volume_from_graph(graph_left, dimensions=dimensions)
+    empty_vol_right = create_empty_volume_from_graph(graph_right, dimensions=dimensions)
+    vol_skeleton = create_empty_volume_from_graph(graph_right, dimensions=dimensions)
+    # Generate the skeletons according to the junction
+    if junction == 'wide':
+        vol_skeleton_left = generate_skeleton_wide_junction(graph_left, empty_vol_left)
+        vol_skeleton_right = generate_skeleton_wide_junction(graph_right, empty_vol_right)
+    else:
+        vol_skeleton_left = generate_skeleton_thin_junction(graph_left, empty_vol_left)
+        vol_skeleton_right = generate_skeleton_thin_junction(graph_right, empty_vol_right)
+
+    priority_order = {0: 15, 10: 10, 11: 14, 20: 9, 30: 12, 35: 11, 40: 8,
+                      50: 7, 60: 13, 70: 6, 80: 5, 90: 4, 100: 3, 110: 2, 120: 1}
+    arr_skeleton_left = np.asarray(vol_skeleton_left)
+    arr_skeleton_right = np.asarray(vol_skeleton_right)
+    arr_skeleton = np.asarray(vol_skeleton)
+
+    # Add the left and right skeletons
+    # For contentious voxels (voxels which have two different values in the two skeletons),
+    # the value is chosen according to the priority order
+    vectorize = np.vectorize(lambda x: priority_order[x])
+    mask = vectorize(arr_skeleton_right) >= vectorize(arr_skeleton_left)
+    arr_skeleton[mask] = arr_skeleton_left[mask]
+    arr_skeleton[~mask] = arr_skeleton_right[~mask]
+    arr_skeleton = arr_skeleton.astype(int)
+
+    # Sanity checks
+    # FIXME : select good threshold
+    threshold = 200
+    nb_contentious_voxels = np.count_nonzero(np.logical_and(arr_skeleton_left, arr_skeleton_right))
+    log.debug(f"Unique values of the skeleton : {np.unique(arr_skeleton)}")
+    log.debug(f"Number of conflict voxels between left and right skeletons : {nb_contentious_voxels}")
+    if nb_contentious_voxels > threshold:
+        log.warning(f"Left and right graph files have {nb_contentious_voxels} voxels with different values! "
+                    f"Graph files : {graph_file_left} and {graph_file_right}")
+
+    if not is_skeleton(np.asarray(vol_skeleton)):
+        raise DeepFoldingError(f"{skeleton_file} has unexpected skeleton values")
+    aims.write(vol_skeleton, skeleton_file)
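For the skeleton case, conflicts cannot be resolved with a simple offset because the voxel values form a fixed topological code, so generate_full_skeleton picks the higher-priority value voxel-wise via the priority_order table. A toy version of that selection, using the same table as the diff and made-up 1-D arrays:

import numpy as np

priority_order = {0: 15, 10: 10, 11: 14, 20: 9, 30: 12, 35: 11, 40: 8,
                  50: 7, 60: 13, 70: 6, 80: 5, 90: 4, 100: 3, 110: 2, 120: 1}

arr_left = np.array([0, 30, 60, 110])      # made-up skeleton codes
arr_right = np.array([0, 40, 0, 120])

rank = np.vectorize(lambda x: priority_order[x])
mask = rank(arr_right) >= rank(arr_left)   # True where the left value has equal
                                           # or higher priority (lower rank wins)
merged = np.where(mask, arr_left, arr_right)
print(merged)                              # -> 0, 40, 60, 120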