diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e55b1fd --- /dev/null +++ b/.gitignore @@ -0,0 +1,27 @@ +# Python Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# Python distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# Mac OS-specific storage files +.DS_Store diff --git a/README.md b/README.md new file mode 100644 index 0000000..ffdd291 --- /dev/null +++ b/README.md @@ -0,0 +1,40 @@ +# Pantheon + +Pantheon is a series of functions and workflows aimed at facilitating the analysis of fMRI data. It +provides a unified API to access and manipulate data in a custom layout. It also includes workflows +to create HCP-like data (e.g., functional dtseries data in fsLR 32k space) from fMRIPrep outputs. + +This project is created to meet my own data analysis requirements and preferences. It may not +reflect best practices, but it will constantly evolve with my latest thoughts. + +## Getting Started + +This project mostly relies on Python packages, but several common neuroimaging tools are also +required to complete certain operations. + +### Dependencies + +Python +- Required + - numpy + - pandas + - nibabel + - nilearn +- Optional + - rpy2 + - plotly + +Neuroimaging tools +- AFNI +- FSL +- FreeSurfer +- Workbench + + +## License + +Distributed under the MIT License. 
+ +## Contact + +Zhifang Ye - zhifang.ye.fghm@gmail.com diff --git a/pantheon/__init__.py b/pantheon/__init__.py new file mode 100644 index 0000000..a5682fb --- /dev/null +++ b/pantheon/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- \ No newline at end of file diff --git a/pantheon/data/atlas_example.yaml b/pantheon/data/atlas_example.yaml new file mode 100644 index 0000000..078e5d6 --- /dev/null +++ b/pantheon/data/atlas_example.yaml @@ -0,0 +1,20 @@ +--- +fsLR: + MMP1: + L: data/external/atlas/MMP1/S1200_space-fsLR_den-32k_desc-MMP1_dseg.dlabel.nii + R: data/external/atlas/MMP1/S1200_space-fsLR_den-32k_desc-MMP1_dseg.dlabel.nii + MMP1_Subcortical: + L: data/external/atlas/MMP1/S1200_space-MNI152NLin2009cAsym_res-02_desc-MMP1_dseg.nii.gz + R: data/external/atlas/MMP1/S1200_space-MNI152NLin2009cAsym_res-02_desc-MMP1_dseg.nii.gz +fsaverage: + Brainnetome: + L: data/external/atlas/Brainnetome/fsaverage.L.BN_Atlas.32k_fs_LR.label.gii + R: data/external/atlas/Brainnetome/fsaverage.R.BN_Atlas.32k_fs_LR.label.gii +MNI152NLin6Asym: + Template_ASeg: + L: data/external/atlas/Template_ASeg/MNI_space-MNI152NLin6Asym_res-2_desc-Subcortical_dseg.nii.gz + R: data/external/atlas/Template_ASeg/MNI_space-MNI152NLin6Asym_res-2_desc-Subcortical_dseg.nii.gz +T1w: + Custom: + L: data/external/atlas/Custom/{sub_id}/{sub_id}_hemi-L.label.gii + R: data/external/atlas/Custom/{sub_id}/{sub_id}_hemi-R.label.gii diff --git a/pantheon/data/data_validation_example.yaml b/pantheon/data/data_validation_example.yaml new file mode 100644 index 0000000..6adf5f7 --- /dev/null +++ b/pantheon/data/data_validation_example.yaml @@ -0,0 +1,13 @@ +task: + task-encoding: + run_list: [run-1, run-2] # a list indicates all possible runs + exclude: + sub-023: [run-1] # map subject and runs need to be excluded + task-retrieval: + run_list: [run-1, run-2, run-3] + exclude: {} + task-movie: + run_list: [] # an empty list for task with only one scan + exclude: + sub-005: [] # leave the 
run list empty +exclude_subject: [sub-001] diff --git a/pantheon/data/hcp_config/FreeSurferAllLut.txt b/pantheon/data/hcp_config/FreeSurferAllLut.txt new file mode 100644 index 0000000..5001911 --- /dev/null +++ b/pantheon/data/hcp_config/FreeSurferAllLut.txt @@ -0,0 +1,2530 @@ +LEFT-CEREBRAL-EXTERIOR +1 70 130 180 255 +LEFT-CEREBRAL-WHITE-MATTER +2 245 245 245 255 +LEFT-CEREBRAL-CORTEX +3 205 62 78 255 +LEFT-LATERAL-VENTRICLE +4 120 18 134 255 +LEFT-INF-LAT-VENT +5 196 58 250 255 +LEFT-CEREBELLUM-EXTERIOR +6 0 148 0 255 +LEFT-CEREBELLUM-WHITE-MATTER +7 220 248 164 255 +LEFT-CEREBELLUM-CORTEX +8 230 148 34 255 +LEFT-THALAMUS +9 0 118 14 255 +LEFT-THALAMUS-PROPER +10 0 118 14 255 +LEFT-CAUDATE +11 122 186 220 255 +LEFT-PUTAMEN +12 236 13 176 255 +LEFT-PALLIDUM +13 12 48 255 255 +3RD-VENTRICLE +14 204 182 142 255 +4TH-VENTRICLE +15 42 204 164 255 +BRAIN-STEM +16 119 159 176 255 +LEFT-HIPPOCAMPUS +17 220 216 20 255 +LEFT-AMYGDALA +18 103 255 255 255 +LEFT-INSULA +19 80 196 98 255 +LEFT-OPERCULUM +20 60 58 210 255 +LINE-1 +21 60 58 210 255 +LINE-2 +22 60 58 210 255 +LINE-3 +23 60 58 210 255 +CSF +24 60 60 60 255 +LEFT-LESION +25 255 165 0 255 +LEFT-ACCUMBENS-AREA +26 255 165 0 255 +LEFT-SUBSTANCIA-NIGRA +27 0 255 127 255 +LEFT-VENTRALDC +28 165 42 42 255 +LEFT-UNDETERMINED +29 135 206 235 255 +LEFT-VESSEL +30 160 32 240 255 +LEFT-CHOROID-PLEXUS +31 0 200 200 255 +LEFT-F3ORB +32 100 50 100 255 +LEFT-LOG +33 135 50 74 255 +LEFT-AOG +34 122 135 50 255 +LEFT-MOG +35 51 50 135 255 +LEFT-POG +36 74 155 60 255 +LEFT-STELLATE +37 120 62 43 255 +LEFT-PORG +38 74 155 60 255 +LEFT-AORG +39 122 135 50 255 +RIGHT-CEREBRAL-EXTERIOR +40 70 130 180 255 +RIGHT-CEREBRAL-WHITE-MATTER +41 0 225 0 255 +RIGHT-CEREBRAL-CORTEX +42 205 62 78 255 +RIGHT-LATERAL-VENTRICLE +43 120 18 134 255 +RIGHT-INF-LAT-VENT +44 196 58 250 255 +RIGHT-CEREBELLUM-EXTERIOR +45 0 148 0 255 +RIGHT-CEREBELLUM-WHITE-MATTER +46 220 248 164 255 +RIGHT-CEREBELLUM-CORTEX +47 230 148 34 255 +RIGHT-THALAMUS +48 0 118 14 
255 +RIGHT-THALAMUS-PROPER +49 0 118 14 255 +RIGHT-CAUDATE +50 122 186 220 255 +RIGHT-PUTAMEN +51 236 13 176 255 +RIGHT-PALLIDUM +52 13 48 255 255 +RIGHT-HIPPOCAMPUS +53 220 216 20 255 +RIGHT-AMYGDALA +54 103 255 255 255 +RIGHT-INSULA +55 80 196 98 255 +RIGHT-OPERCULUM +56 60 58 210 255 +RIGHT-LESION +57 255 165 0 255 +RIGHT-ACCUMBENS-AREA +58 255 165 0 255 +RIGHT-SUBSTANCIA-NIGRA +59 0 255 127 255 +RIGHT-VENTRALDC +60 165 42 42 255 +RIGHT-UNDETERMINED +61 135 206 235 255 +RIGHT-VESSEL +62 160 32 240 255 +RIGHT-CHOROID-PLEXUS +63 0 200 221 255 +RIGHT-F3ORB +64 100 50 100 255 +RIGHT-LOG +65 135 50 74 255 +RIGHT-AOG +66 122 135 50 255 +RIGHT-MOG +67 51 50 135 255 +RIGHT-POG +68 74 155 60 255 +RIGHT-STELLATE +69 120 62 43 255 +RIGHT-PORG +70 74 155 60 255 +RIGHT-AORG +71 122 135 50 255 +5TH-VENTRICLE +72 120 190 150 255 +LEFT-INTERIOR +73 122 135 50 255 +RIGHT-INTERIOR +74 122 135 50 255 +WM-HYPOINTENSITIES +77 200 70 255 255 +LEFT-WM-HYPOINTENSITIES +78 255 148 10 255 +RIGHT-WM-HYPOINTENSITIES +79 255 148 10 255 +NON-WM-HYPOINTENSITIES +80 164 108 226 255 +LEFT-NON-WM-HYPOINTENSITIES +81 164 108 226 255 +RIGHT-NON-WM-HYPOINTENSITIES +82 164 108 226 255 +LEFT-F1 +83 255 218 185 255 +RIGHT-F1 +84 255 218 185 255 +OPTIC-CHIASM +85 234 169 30 255 +CORPUS_CALLOSUM +192 250 255 50 255 +LEFT_FUTURE_WMSA +86 200 120 255 255 +RIGHT_FUTURE_WMSA +87 200 121 255 255 +FUTURE_WMSA +88 200 122 255 255 +LEFT-AMYGDALA-ANTERIOR +96 205 10 125 255 +RIGHT-AMYGDALA-ANTERIOR +97 205 10 125 255 +DURA +98 160 32 240 255 +LEFT-WM-INTENSITY-ABNORMALITY +100 124 140 178 255 +LEFT-CAUDATE-INTENSITY-ABNORMALITY +101 125 140 178 255 +LEFT-PUTAMEN-INTENSITY-ABNORMALITY +102 126 140 178 255 +LEFT-ACCUMBENS-INTENSITY-ABNORMALITY +103 127 140 178 255 +LEFT-PALLIDUM-INTENSITY-ABNORMALITY +104 124 141 178 255 +LEFT-AMYGDALA-INTENSITY-ABNORMALITY +105 124 142 178 255 +LEFT-HIPPOCAMPUS-INTENSITY-ABNORMALITY +106 124 143 178 255 +LEFT-THALAMUS-INTENSITY-ABNORMALITY +107 124 144 178 255 
+LEFT-VDC-INTENSITY-ABNORMALITY +108 124 140 179 255 +RIGHT-WM-INTENSITY-ABNORMALITY +109 124 140 178 255 +RIGHT-CAUDATE-INTENSITY-ABNORMALITY +110 125 140 178 255 +RIGHT-PUTAMEN-INTENSITY-ABNORMALITY +111 126 140 178 255 +RIGHT-ACCUMBENS-INTENSITY-ABNORMALITY +112 127 140 178 255 +RIGHT-PALLIDUM-INTENSITY-ABNORMALITY +113 124 141 178 255 +RIGHT-AMYGDALA-INTENSITY-ABNORMALITY +114 124 142 178 255 +RIGHT-HIPPOCAMPUS-INTENSITY-ABNORMALITY +115 124 143 178 255 +RIGHT-THALAMUS-INTENSITY-ABNORMALITY +116 124 144 178 255 +RIGHT-VDC-INTENSITY-ABNORMALITY +117 124 140 179 255 +EPIDERMIS +118 255 20 147 255 +CONN-TISSUE +119 205 179 139 255 +SC-FAT-MUSCLE +120 238 238 209 255 +CRANIUM +121 200 200 200 255 +CSF-SA +122 74 255 74 255 +MUSCLE +123 238 0 0 255 +EAR +124 0 0 139 255 +ADIPOSE +125 173 255 47 255 +SPINAL-CORD +126 133 203 229 255 +SOFT-TISSUE +127 26 237 57 255 +NERVE +128 34 139 34 255 +BONE +129 30 144 255 255 +AIR +130 147 19 173 255 +ORBITAL-FAT +131 238 59 59 255 +TONGUE +132 221 39 200 255 +NASAL-STRUCTURES +133 238 174 238 255 +GLOBE +134 255 0 0 255 +TEETH +135 72 61 139 255 +LEFT-CAUDATE-PUTAMEN +136 21 39 132 255 +RIGHT-CAUDATE-PUTAMEN +137 21 39 132 255 +LEFT-CLAUSTRUM +138 65 135 20 255 +RIGHT-CLAUSTRUM +139 65 135 20 255 +CORNEA +140 134 4 160 255 +DIPLOE +142 221 226 68 255 +VITREOUS-HUMOR +143 255 255 254 255 +LENS +144 52 209 226 255 +AQUEOUS-HUMOR +145 239 160 223 255 +OUTER-TABLE +146 70 130 180 255 +INNER-TABLE +147 70 130 181 255 +PERIOSTEUM +148 139 121 94 255 +ENDOSTEUM +149 224 224 224 255 +R-C-S +150 255 0 0 255 +IRIS +151 205 205 0 255 +SC-ADIPOSE-MUSCLE +152 238 238 209 255 +SC-TISSUE +153 139 121 94 255 +ORBITAL-ADIPOSE +154 238 59 59 255 +LEFT-INTCAPSULE-ANT +155 238 59 59 255 +RIGHT-INTCAPSULE-ANT +156 238 59 59 255 +LEFT-INTCAPSULE-POS +157 62 10 205 255 +RIGHT-INTCAPSULE-POS +158 62 10 205 255 +LEFT-CEREBRAL-WM-UNMYELINATED +159 0 118 14 255 +RIGHT-CEREBRAL-WM-UNMYELINATED +160 0 118 14 255 +LEFT-CEREBRAL-WM-MYELINATED +161 220 216 
21 255 +RIGHT-CEREBRAL-WM-MYELINATED +162 220 216 21 255 +LEFT-SUBCORTICAL-GRAY-MATTER +163 122 186 220 255 +RIGHT-SUBCORTICAL-GRAY-MATTER +164 122 186 220 255 +SKULL +165 255 165 0 255 +POSTERIOR-FOSSA +166 14 48 255 255 +SCALP +167 166 42 42 255 +HEMATOMA +168 121 18 134 255 +LEFT-BASAL-GANGLIA +169 236 13 127 255 +RIGHT-BASAL-GANGLIA +176 236 13 126 255 +BRAINSTEM +170 119 159 176 255 +DCG +171 119 0 176 255 +VERMIS +172 119 100 176 255 +MIDBRAIN +173 119 200 176 255 +PONS +174 119 159 100 255 +MEDULLA +175 119 159 200 255 +LEFT-CORTICAL-DYSPLASIA +180 73 61 139 255 +RIGHT-CORTICAL-DYSPLASIA +181 73 62 139 255 +LEFT-HIPPOCAMPAL_FISSURE +193 0 196 255 255 +LEFT-CADG-HEAD +194 255 164 164 255 +LEFT-SUBICULUM +195 196 196 0 255 +LEFT-FIMBRIA +196 0 100 255 255 +RIGHT-HIPPOCAMPAL_FISSURE +197 128 196 164 255 +RIGHT-CADG-HEAD +198 0 126 75 255 +RIGHT-SUBICULUM +199 128 96 64 255 +RIGHT-FIMBRIA +200 0 50 128 255 +ALVEUS +201 255 204 153 255 +PERFORANT_PATHWAY +202 255 128 128 255 +PARASUBICULUM +203 255 255 0 255 +PRESUBICULUM +204 64 0 64 255 +SUBICULUM +205 0 0 255 255 +CA1 +206 255 0 0 255 +CA2 +207 128 128 255 255 +CA3 +208 0 128 0 255 +CA4 +209 196 160 128 255 +GC-ML-DG +210 32 200 255 255 +HATA +211 128 255 128 255 +FIMBRIA +212 204 153 204 255 +LATERAL_VENTRICLE +213 121 17 136 255 +MOLECULAR_LAYER_HP +214 128 0 0 255 +HIPPOCAMPAL_FISSURE +215 128 32 255 255 +ENTORHINAL_CORTEX +216 255 204 102 255 +MOLECULAR_LAYER_SUBICULUM +217 128 128 128 255 +AMYGDALA +218 104 255 255 255 +CEREBRAL_WHITE_MATTER +219 0 226 0 255 +CEREBRAL_CORTEX +220 205 63 78 255 +INF_LAT_VENT +221 197 58 250 255 +PERIRHINAL +222 33 150 250 255 +CEREBRAL_WHITE_MATTER_EDGE +223 226 0 0 255 +BACKGROUND +224 100 100 100 255 +ECTORHINAL +225 197 150 250 255 +HP_TAIL +226 170 170 255 255 +FORNIX +250 255 0 0 255 +CC_POSTERIOR +251 0 0 64 255 +CC_MID_POSTERIOR +252 0 0 112 255 +CC_CENTRAL +253 0 0 160 255 +CC_MID_ANTERIOR +254 0 0 208 255 +CC_ANTERIOR +255 0 0 255 255 +VOXEL-UNCHANGED +256 0 0 0 
255 +AORTA +331 255 0 0 255 +LEFT-COMMON-ILIACA +332 255 80 0 255 +RIGHT-COMMON-ILIACA +333 255 160 0 255 +LEFT-EXTERNAL-ILIACA +334 255 255 0 255 +RIGHT-EXTERNAL-ILIACA +335 0 255 0 255 +LEFT-INTERNAL-ILIACA +336 255 0 160 255 +RIGHT-INTERNAL-ILIACA +337 255 0 255 255 +LEFT-LATERAL-SACRALA +338 255 50 80 255 +RIGHT-LATERAL-SACRALA +339 80 255 50 255 +LEFT-OBTURATORA +340 160 255 50 255 +RIGHT-OBTURATORA +341 160 200 255 255 +LEFT-INTERNAL-PUDENDALA +342 0 255 160 255 +RIGHT-INTERNAL-PUDENDALA +343 0 0 255 255 +LEFT-UMBILICALA +344 80 50 255 255 +RIGHT-UMBILICALA +345 160 0 255 255 +LEFT-INF-RECTALA +346 255 210 0 255 +RIGHT-INF-RECTALA +347 0 160 255 255 +LEFT-COMMON-ILIACV +348 255 200 80 255 +RIGHT-COMMON-ILIACV +349 255 200 160 255 +LEFT-EXTERNAL-ILIACV +350 255 80 200 255 +RIGHT-EXTERNAL-ILIACV +351 255 160 200 255 +LEFT-INTERNAL-ILIACV +352 30 255 80 255 +RIGHT-INTERNAL-ILIACV +353 80 200 255 255 +LEFT-OBTURATORV +354 80 255 200 255 +RIGHT-OBTURATORV +355 195 255 200 255 +LEFT-INTERNAL-PUDENDALV +356 120 200 20 255 +RIGHT-INTERNAL-PUDENDALV +357 170 10 200 255 +POS-LYMPH +358 20 130 180 255 +NEG-LYMPH +359 20 180 130 255 +V1 +400 206 62 78 255 +V2 +401 121 18 134 255 +BA44 +402 199 58 250 255 +BA45 +403 1 148 0 255 +BA4A +404 221 248 164 255 +BA4P +405 231 148 34 255 +BA6 +406 1 118 14 255 +BA2 +407 120 118 14 255 +BA1_OLD +408 123 186 221 255 +BAUN2 +409 238 13 177 255 +BA1 +410 123 186 220 255 +BA2B +411 138 13 206 255 +BA3A +412 238 130 176 255 +BA3B +413 218 230 76 255 +MT +414 38 213 176 255 +AIPS_AIP_L +415 1 225 176 255 +AIPS_AIP_R +416 1 225 176 255 +AIPS_VIP_L +417 200 2 100 255 +AIPS_VIP_R +418 200 2 100 255 +IPL_PFCM_L +419 5 200 90 255 +IPL_PFCM_R +420 5 200 90 255 +IPL_PF_L +421 100 5 200 255 +IPL_PFM_L +422 25 255 100 255 +IPL_PFM_R +423 25 255 100 255 +IPL_PFOP_L +424 230 7 100 255 +IPL_PFOP_R +425 230 7 100 255 +IPL_PF_R +426 100 5 200 255 +IPL_PFT_L +427 150 10 200 255 +IPL_PFT_R +428 150 10 200 255 +IPL_PGA_L +429 175 10 176 255 +IPL_PGA_R 
+430 175 10 176 255 +IPL_PGP_L +431 10 100 255 255 +IPL_PGP_R +432 10 100 255 255 +VISUAL_V3D_L +433 150 45 70 255 +VISUAL_V3D_R +434 150 45 70 255 +VISUAL_V4_L +435 45 200 15 255 +VISUAL_V4_R +436 45 200 15 255 +VISUAL_V5_B +437 227 45 100 255 +VISUAL_VP_L +438 227 45 100 255 +VISUAL_VP_R +439 227 45 100 255 +WMSA +498 143 188 143 255 +OTHER_WMSA +499 255 248 220 255 +RIGHT_CA2_3 +500 17 85 136 255 +RIGHT_ALVEUS +501 119 187 102 255 +RIGHT_CA1 +502 204 68 34 255 +RIGHT_FIMBRIA +503 204 0 255 255 +RIGHT_PRESUBICULUM +504 221 187 17 255 +RIGHT_HIPPOCAMPAL_FISSURE +505 153 221 238 255 +RIGHT_CA4_DG +506 51 17 17 255 +RIGHT_SUBICULUM +507 0 119 85 255 +RIGHT_FORNIX +508 20 100 200 255 +LEFT_CA2_3 +550 17 85 137 255 +LEFT_ALVEUS +551 119 187 103 255 +LEFT_CA1 +552 204 68 35 255 +LEFT_FIMBRIA +553 204 0 254 255 +LEFT_PRESUBICULUM +554 221 187 16 255 +LEFT_HIPPOCAMPAL_FISSURE +555 153 221 239 255 +LEFT_CA4_DG +556 51 17 18 255 +LEFT_SUBICULUM +557 0 119 86 255 +LEFT_FORNIX +558 20 100 201 255 +TUMOR +600 254 254 254 255 +CBM_LEFT_I_IV +601 70 130 180 255 +CBM_RIGHT_I_IV +602 245 245 245 255 +CBM_LEFT_V +603 205 62 78 255 +CBM_RIGHT_V +604 120 18 134 255 +CBM_LEFT_VI +605 196 58 250 255 +CBM_VERMIS_VI +606 0 148 0 255 +CBM_RIGHT_VI +607 220 248 164 255 +CBM_LEFT_CRUSI +608 230 148 34 255 +CBM_VERMIS_CRUSI +609 0 118 14 255 +CBM_RIGHT_CRUSI +610 0 118 14 255 +CBM_LEFT_CRUSII +611 122 186 220 255 +CBM_VERMIS_CRUSII +612 236 13 176 255 +CBM_RIGHT_CRUSII +613 12 48 255 255 +CBM_LEFT_VIIB +614 204 182 142 255 +CBM_VERMIS_VIIB +615 42 204 164 255 +CBM_RIGHT_VIIB +616 119 159 176 255 +CBM_LEFT_VIIIA +617 220 216 20 255 +CBM_VERMIS_VIIIA +618 103 255 255 255 +CBM_RIGHT_VIIIA +619 80 196 98 255 +CBM_LEFT_VIIIB +620 60 58 210 255 +CBM_VERMIS_VIIIB +621 60 58 210 255 +CBM_RIGHT_VIIIB +622 60 58 210 255 +CBM_LEFT_IX +623 60 58 210 255 +CBM_VERMIS_IX +624 60 60 60 255 +CBM_RIGHT_IX +625 255 165 0 255 +CBM_LEFT_X +626 255 165 0 255 +CBM_VERMIS_X +627 0 255 127 255 +CBM_RIGHT_X +628 165 
42 42 255 +CBM_RIGHT_I_V_MED +640 204 0 0 255 +CBM_RIGHT_I_V_MID +641 255 0 0 255 +CBM_RIGHT_VI_MED +642 0 0 255 255 +CBM_RIGHT_VI_MID +643 30 144 255 255 +CBM_RIGHT_VI_LAT +644 100 212 237 255 +CBM_RIGHT_CRUSI_MED +645 218 165 32 255 +CBM_RIGHT_CRUSI_MID +646 255 215 0 255 +CBM_RIGHT_CRUSI_LAT +647 255 255 166 255 +CBM_RIGHT_CRUSII_MED +648 153 0 204 255 +CBM_RIGHT_CRUSII_MID +649 153 141 209 255 +CBM_RIGHT_CRUSII_LAT +650 204 204 255 255 +CBM_RIGHT_7MED +651 31 212 194 255 +CBM_RIGHT_7MID +652 3 255 237 255 +CBM_RIGHT_7LAT +653 204 255 255 255 +CBM_RIGHT_8MED +654 86 74 147 255 +CBM_RIGHT_8MID +655 114 114 190 255 +CBM_RIGHT_8LAT +656 184 178 255 255 +CBM_RIGHT_PUNS +657 126 138 37 255 +CBM_RIGHT_TONS +658 189 197 117 255 +CBM_RIGHT_FLOS +659 240 230 140 255 +CBM_LEFT_I_V_MED +660 204 0 0 255 +CBM_LEFT_I_V_MID +661 255 0 0 255 +CBM_LEFT_VI_MED +662 0 0 255 255 +CBM_LEFT_VI_MID +663 30 144 255 255 +CBM_LEFT_VI_LAT +664 100 212 237 255 +CBM_LEFT_CRUSI_MED +665 218 165 32 255 +CBM_LEFT_CRUSI_MID +666 255 215 0 255 +CBM_LEFT_CRUSI_LAT +667 255 255 166 255 +CBM_LEFT_CRUSII_MED +668 153 0 204 255 +CBM_LEFT_CRUSII_MID +669 153 141 209 255 +CBM_LEFT_CRUSII_LAT +670 204 204 255 255 +CBM_LEFT_7MED +671 31 212 194 255 +CBM_LEFT_7MID +672 3 255 237 255 +CBM_LEFT_7LAT +673 204 255 255 255 +CBM_LEFT_8MED +674 86 74 147 255 +CBM_LEFT_8MID +675 114 114 190 255 +CBM_LEFT_8LAT +676 184 178 255 255 +CBM_LEFT_PUNS +677 126 138 37 255 +CBM_LEFT_TONS +678 189 197 117 255 +CBM_LEFT_FLOS +679 240 230 140 255 +CSF-FSL-FAST +701 120 18 134 255 +GRAYMATTER-FSL-FAST +702 205 62 78 255 +WHITEMATTER-FSL-FAST +703 0 225 0 255 +SUSPICIOUS +999 255 100 100 255 +CTX-LH-UNKNOWN +1000 25 5 25 255 +CTX-LH-BANKSSTS +1001 25 100 40 255 +CTX-LH-CAUDALANTERIORCINGULATE +1002 125 100 160 255 +CTX-LH-CAUDALMIDDLEFRONTAL +1003 100 25 0 255 +CTX-LH-CORPUSCALLOSUM +1004 120 70 50 255 +CTX-LH-CUNEUS +1005 220 20 100 255 +CTX-LH-ENTORHINAL +1006 220 20 10 255 +CTX-LH-FUSIFORM +1007 180 220 140 255 
+CTX-LH-INFERIORPARIETAL +1008 220 60 220 255 +CTX-LH-INFERIORTEMPORAL +1009 180 40 120 255 +CTX-LH-ISTHMUSCINGULATE +1010 140 20 140 255 +CTX-LH-LATERALOCCIPITAL +1011 20 30 140 255 +CTX-LH-LATERALORBITOFRONTAL +1012 35 75 50 255 +CTX-LH-LINGUAL +1013 225 140 140 255 +CTX-LH-MEDIALORBITOFRONTAL +1014 200 35 75 255 +CTX-LH-MIDDLETEMPORAL +1015 160 100 50 255 +CTX-LH-PARAHIPPOCAMPAL +1016 20 220 60 255 +CTX-LH-PARACENTRAL +1017 60 220 60 255 +CTX-LH-PARSOPERCULARIS +1018 220 180 140 255 +CTX-LH-PARSORBITALIS +1019 20 100 50 255 +CTX-LH-PARSTRIANGULARIS +1020 220 60 20 255 +CTX-LH-PERICALCARINE +1021 120 100 60 255 +CTX-LH-POSTCENTRAL +1022 220 20 20 255 +CTX-LH-POSTERIORCINGULATE +1023 220 180 220 255 +CTX-LH-PRECENTRAL +1024 60 20 220 255 +CTX-LH-PRECUNEUS +1025 160 140 180 255 +CTX-LH-ROSTRALANTERIORCINGULATE +1026 80 20 140 255 +CTX-LH-ROSTRALMIDDLEFRONTAL +1027 75 50 125 255 +CTX-LH-SUPERIORFRONTAL +1028 20 220 160 255 +CTX-LH-SUPERIORPARIETAL +1029 20 180 140 255 +CTX-LH-SUPERIORTEMPORAL +1030 140 220 220 255 +CTX-LH-SUPRAMARGINAL +1031 80 160 20 255 +CTX-LH-FRONTALPOLE +1032 100 0 100 255 +CTX-LH-TEMPORALPOLE +1033 70 70 70 255 +CTX-LH-TRANSVERSETEMPORAL +1034 150 150 200 255 +CTX-LH-INSULA +1035 255 192 32 255 +CTX-RH-UNKNOWN +2000 25 5 25 255 +CTX-RH-BANKSSTS +2001 25 100 40 255 +CTX-RH-CAUDALANTERIORCINGULATE +2002 125 100 160 255 +CTX-RH-CAUDALMIDDLEFRONTAL +2003 100 25 0 255 +CTX-RH-CORPUSCALLOSUM +2004 120 70 50 255 +CTX-RH-CUNEUS +2005 220 20 100 255 +CTX-RH-ENTORHINAL +2006 220 20 10 255 +CTX-RH-FUSIFORM +2007 180 220 140 255 +CTX-RH-INFERIORPARIETAL +2008 220 60 220 255 +CTX-RH-INFERIORTEMPORAL +2009 180 40 120 255 +CTX-RH-ISTHMUSCINGULATE +2010 140 20 140 255 +CTX-RH-LATERALOCCIPITAL +2011 20 30 140 255 +CTX-RH-LATERALORBITOFRONTAL +2012 35 75 50 255 +CTX-RH-LINGUAL +2013 225 140 140 255 +CTX-RH-MEDIALORBITOFRONTAL +2014 200 35 75 255 +CTX-RH-MIDDLETEMPORAL +2015 160 100 50 255 +CTX-RH-PARAHIPPOCAMPAL +2016 20 220 60 255 +CTX-RH-PARACENTRAL +2017 60 
220 60 255 +CTX-RH-PARSOPERCULARIS +2018 220 180 140 255 +CTX-RH-PARSORBITALIS +2019 20 100 50 255 +CTX-RH-PARSTRIANGULARIS +2020 220 60 20 255 +CTX-RH-PERICALCARINE +2021 120 100 60 255 +CTX-RH-POSTCENTRAL +2022 220 20 20 255 +CTX-RH-POSTERIORCINGULATE +2023 220 180 220 255 +CTX-RH-PRECENTRAL +2024 60 20 220 255 +CTX-RH-PRECUNEUS +2025 160 140 180 255 +CTX-RH-ROSTRALANTERIORCINGULATE +2026 80 20 140 255 +CTX-RH-ROSTRALMIDDLEFRONTAL +2027 75 50 125 255 +CTX-RH-SUPERIORFRONTAL +2028 20 220 160 255 +CTX-RH-SUPERIORPARIETAL +2029 20 180 140 255 +CTX-RH-SUPERIORTEMPORAL +2030 140 220 220 255 +CTX-RH-SUPRAMARGINAL +2031 80 160 20 255 +CTX-RH-FRONTALPOLE +2032 100 0 100 255 +CTX-RH-TEMPORALPOLE +2033 70 70 70 255 +CTX-RH-TRANSVERSETEMPORAL +2034 150 150 200 255 +CTX-RH-INSULA +2035 255 192 32 255 +WM-LH-UNKNOWN +3000 230 250 230 255 +WM-LH-BANKSSTS +3001 230 155 215 255 +WM-LH-CAUDALANTERIORCINGULATE +3002 130 155 95 255 +WM-LH-CAUDALMIDDLEFRONTAL +3003 155 230 255 255 +WM-LH-CORPUSCALLOSUM +3004 135 185 205 255 +WM-LH-CUNEUS +3005 35 235 155 255 +WM-LH-ENTORHINAL +3006 35 235 245 255 +WM-LH-FUSIFORM +3007 75 35 115 255 +WM-LH-INFERIORPARIETAL +3008 35 195 35 255 +WM-LH-INFERIORTEMPORAL +3009 75 215 135 255 +WM-LH-ISTHMUSCINGULATE +3010 115 235 115 255 +WM-LH-LATERALOCCIPITAL +3011 235 225 115 255 +WM-LH-LATERALORBITOFRONTAL +3012 220 180 205 255 +WM-LH-LINGUAL +3013 30 115 115 255 +WM-LH-MEDIALORBITOFRONTAL +3014 55 220 180 255 +WM-LH-MIDDLETEMPORAL +3015 95 155 205 255 +WM-LH-PARAHIPPOCAMPAL +3016 235 35 195 255 +WM-LH-PARACENTRAL +3017 195 35 195 255 +WM-LH-PARSOPERCULARIS +3018 35 75 115 255 +WM-LH-PARSORBITALIS +3019 235 155 205 255 +WM-LH-PARSTRIANGULARIS +3020 35 195 235 255 +WM-LH-PERICALCARINE +3021 135 155 195 255 +WM-LH-POSTCENTRAL +3022 35 235 235 255 +WM-LH-POSTERIORCINGULATE +3023 35 75 35 255 +WM-LH-PRECENTRAL +3024 195 235 35 255 +WM-LH-PRECUNEUS +3025 95 115 75 255 +WM-LH-ROSTRALANTERIORCINGULATE +3026 175 235 115 255 +WM-LH-ROSTRALMIDDLEFRONTAL +3027 
180 205 130 255 +WM-LH-SUPERIORFRONTAL +3028 235 35 95 255 +WM-LH-SUPERIORPARIETAL +3029 235 75 115 255 +WM-LH-SUPERIORTEMPORAL +3030 115 35 35 255 +WM-LH-SUPRAMARGINAL +3031 175 95 235 255 +WM-LH-FRONTALPOLE +3032 155 255 155 255 +WM-LH-TEMPORALPOLE +3033 185 185 185 255 +WM-LH-TRANSVERSETEMPORAL +3034 105 105 55 255 +WM-LH-INSULA +3035 254 191 31 255 +WM-RH-UNKNOWN +4000 230 250 230 255 +WM-RH-BANKSSTS +4001 230 155 215 255 +WM-RH-CAUDALANTERIORCINGULATE +4002 130 155 95 255 +WM-RH-CAUDALMIDDLEFRONTAL +4003 155 230 255 255 +WM-RH-CORPUSCALLOSUM +4004 135 185 205 255 +WM-RH-CUNEUS +4005 35 235 155 255 +WM-RH-ENTORHINAL +4006 35 235 245 255 +WM-RH-FUSIFORM +4007 75 35 115 255 +WM-RH-INFERIORPARIETAL +4008 35 195 35 255 +WM-RH-INFERIORTEMPORAL +4009 75 215 135 255 +WM-RH-ISTHMUSCINGULATE +4010 115 235 115 255 +WM-RH-LATERALOCCIPITAL +4011 235 225 115 255 +WM-RH-LATERALORBITOFRONTAL +4012 220 180 205 255 +WM-RH-LINGUAL +4013 30 115 115 255 +WM-RH-MEDIALORBITOFRONTAL +4014 55 220 180 255 +WM-RH-MIDDLETEMPORAL +4015 95 155 205 255 +WM-RH-PARAHIPPOCAMPAL +4016 235 35 195 255 +WM-RH-PARACENTRAL +4017 195 35 195 255 +WM-RH-PARSOPERCULARIS +4018 35 75 115 255 +WM-RH-PARSORBITALIS +4019 235 155 205 255 +WM-RH-PARSTRIANGULARIS +4020 35 195 235 255 +WM-RH-PERICALCARINE +4021 135 155 195 255 +WM-RH-POSTCENTRAL +4022 35 235 235 255 +WM-RH-POSTERIORCINGULATE +4023 35 75 35 255 +WM-RH-PRECENTRAL +4024 195 235 35 255 +WM-RH-PRECUNEUS +4025 95 115 75 255 +WM-RH-ROSTRALANTERIORCINGULATE +4026 175 235 115 255 +WM-RH-ROSTRALMIDDLEFRONTAL +4027 180 205 130 255 +WM-RH-SUPERIORFRONTAL +4028 235 35 95 255 +WM-RH-SUPERIORPARIETAL +4029 235 75 115 255 +WM-RH-SUPERIORTEMPORAL +4030 115 35 35 255 +WM-RH-SUPRAMARGINAL +4031 175 95 235 255 +WM-RH-FRONTALPOLE +4032 155 255 155 255 +WM-RH-TEMPORALPOLE +4033 185 185 185 255 +WM-RH-TRANSVERSETEMPORAL +4034 105 105 55 255 +WM-RH-INSULA +4035 254 191 31 255 +CTX-LH-UNKNOWN +1100 0 0 0 255 +CTX-LH-CORPUS_CALLOSUM +1101 50 50 50 255 
+CTX-LH-G_AND_S_INSULA_ONLY_AVERAGE +1102 180 20 30 255 +CTX-LH-G_CINGULATE-ISTHMUS +1103 60 25 25 255 +CTX-LH-G_CINGULATE-MAIN_PART +1104 25 60 60 255 +CTX-LH-G_CINGULATE-CAUDAL_ACC +1200 25 60 61 255 +CTX-LH-G_CINGULATE-ROSTRAL_ACC +1201 25 90 60 255 +CTX-LH-G_CINGULATE-POSTERIOR +1202 25 120 60 255 +CTX-LH-S_CINGULATE-CAUDAL_ACC +1205 25 150 60 255 +CTX-LH-S_CINGULATE-ROSTRAL_ACC +1206 25 180 60 255 +CTX-LH-S_CINGULATE-POSTERIOR +1207 25 210 60 255 +CTX-LH-S_PERICALLOSAL-CAUDAL +1210 25 150 90 255 +CTX-LH-S_PERICALLOSAL-ROSTRAL +1211 25 180 90 255 +CTX-LH-S_PERICALLOSAL-POSTERIOR +1212 25 210 90 255 +CTX-LH-G_CUNEUS +1105 180 20 20 255 +CTX-LH-G_FRONTAL_INF-OPERCULAR_PART +1106 220 20 100 255 +CTX-LH-G_FRONTAL_INF-ORBITAL_PART +1107 140 60 60 255 +CTX-LH-G_FRONTAL_INF-TRIANGULAR_PART +1108 180 220 140 255 +CTX-LH-G_FRONTAL_MIDDLE +1109 140 100 180 255 +CTX-LH-G_FRONTAL_SUPERIOR +1110 180 20 140 255 +CTX-LH-G_FRONTOMARGINAL +1111 140 20 140 255 +CTX-LH-G_INSULAR_LONG +1112 21 10 10 255 +CTX-LH-G_INSULAR_SHORT +1113 225 140 140 255 +CTX-LH-G_AND_S_OCCIPITAL_INFERIOR +1114 23 60 180 255 +CTX-LH-G_OCCIPITAL_MIDDLE +1115 180 60 180 255 +CTX-LH-G_OCCIPITAL_SUPERIOR +1116 20 220 60 255 +CTX-LH-G_OCCIPIT-TEMP_LAT-OR_FUSIFORM +1117 60 20 140 255 +CTX-LH-G_OCCIPIT-TEMP_MED-LINGUAL_PART +1118 220 180 140 255 +CTX-LH-G_OCCIPIT-TEMP_MED-PARAHIPPOCAMPAL_PART +1119 65 100 20 255 +CTX-LH-G_ORBITAL +1120 220 60 20 255 +CTX-LH-G_PARACENTRAL +1121 60 100 60 255 +CTX-LH-G_PARIETAL_INFERIOR-ANGULAR_PART +1122 20 60 220 255 +CTX-LH-G_PARIETAL_INFERIOR-SUPRAMARGINAL_PART +1123 100 100 60 255 +CTX-LH-G_PARIETAL_SUPERIOR +1124 220 180 220 255 +CTX-LH-G_POSTCENTRAL +1125 20 180 140 255 +CTX-LH-G_PRECENTRAL +1126 60 140 180 255 +CTX-LH-G_PRECUNEUS +1127 25 20 140 255 +CTX-LH-G_RECTUS +1128 20 60 100 255 +CTX-LH-G_SUBCALLOSAL +1129 60 220 20 255 +CTX-LH-G_SUBCENTRAL +1130 60 20 220 255 +CTX-LH-G_TEMPORAL_INFERIOR +1131 220 220 100 255 +CTX-LH-G_TEMPORAL_MIDDLE +1132 180 60 60 255 
+CTX-LH-G_TEMP_SUP-G_TEMP_TRANSV_AND_INTERM_S +1133 60 60 220 255 +CTX-LH-G_TEMP_SUP-LATERAL_ASPECT +1134 220 60 220 255 +CTX-LH-G_TEMP_SUP-PLANUM_POLARE +1135 65 220 60 255 +CTX-LH-G_TEMP_SUP-PLANUM_TEMPOLARE +1136 25 140 20 255 +CTX-LH-G_AND_S_TRANSVERSE_FRONTOPOLAR +1137 13 0 250 255 +CTX-LH-LAT_FISSURE-ANT_SGT-RAMUS_HORIZONTAL +1138 61 20 220 255 +CTX-LH-LAT_FISSURE-ANT_SGT-RAMUS_VERTICAL +1139 61 20 60 255 +CTX-LH-LAT_FISSURE-POST_SGT +1140 61 60 100 255 +CTX-LH-MEDIAL_WALL +1141 25 25 25 255 +CTX-LH-POLE_OCCIPITAL +1142 140 20 60 255 +CTX-LH-POLE_TEMPORAL +1143 220 180 20 255 +CTX-LH-S_CALCARINE +1144 63 180 180 255 +CTX-LH-S_CENTRAL +1145 221 20 10 255 +CTX-LH-S_CENTRAL_INSULA +1146 21 220 20 255 +CTX-LH-S_CINGULATE-MAIN_PART_AND_INTRACINGULATE +1147 183 100 20 255 +CTX-LH-S_CINGULATE-MARGINALIS_PART +1148 221 20 100 255 +CTX-LH-S_CIRCULAR_INSULA_ANTERIOR +1149 221 60 140 255 +CTX-LH-S_CIRCULAR_INSULA_INFERIOR +1150 221 20 220 255 +CTX-LH-S_CIRCULAR_INSULA_SUPERIOR +1151 61 220 220 255 +CTX-LH-S_COLLATERAL_TRANSVERSE_ANT +1152 100 200 200 255 +CTX-LH-S_COLLATERAL_TRANSVERSE_POST +1153 10 200 200 255 +CTX-LH-S_FRONTAL_INFERIOR +1154 221 220 20 255 +CTX-LH-S_FRONTAL_MIDDLE +1155 141 20 100 255 +CTX-LH-S_FRONTAL_SUPERIOR +1156 61 220 100 255 +CTX-LH-S_FRONTOMARGINAL +1157 21 220 60 255 +CTX-LH-S_INTERMEDIUS_PRIMUS-JENSEN +1158 141 60 20 255 +CTX-LH-S_INTRAPARIETAL-AND_PARIETAL_TRANSVERSE +1159 143 20 220 255 +CTX-LH-S_OCCIPITAL_ANTERIOR +1160 61 20 180 255 +CTX-LH-S_OCCIPITAL_MIDDLE_AND_LUNATUS +1161 101 60 220 255 +CTX-LH-S_OCCIPITAL_SUPERIOR_AND_TRANSVERSALIS +1162 21 20 140 255 +CTX-LH-S_OCCIPITO-TEMPORAL_LATERAL +1163 221 140 20 255 +CTX-LH-S_OCCIPITO-TEMPORAL_MEDIAL_AND_S_LINGUAL +1164 141 100 220 255 +CTX-LH-S_ORBITAL-H_SHAPPED +1165 101 20 20 255 +CTX-LH-S_ORBITAL_LATERAL +1166 221 100 20 255 +CTX-LH-S_ORBITAL_MEDIAL-OR_OLFACTORY +1167 181 200 20 255 +CTX-LH-S_PARACENTRAL +1168 21 180 140 255 +CTX-LH-S_PARIETO_OCCIPITAL +1169 101 100 180 255 
+CTX-LH-S_PERICALLOSAL +1170 181 220 20 255 +CTX-LH-S_POSTCENTRAL +1171 21 140 200 255 +CTX-LH-S_PRECENTRAL-INFERIOR-PART +1172 21 20 240 255 +CTX-LH-S_PRECENTRAL-SUPERIOR-PART +1173 21 20 200 255 +CTX-LH-S_SUBCENTRAL_ANT +1174 61 180 60 255 +CTX-LH-S_SUBCENTRAL_POST +1175 61 180 250 255 +CTX-LH-S_SUBORBITAL +1176 21 20 60 255 +CTX-LH-S_SUBPARIETAL +1177 101 60 60 255 +CTX-LH-S_SUPRACINGULATE +1178 21 220 220 255 +CTX-LH-S_TEMPORAL_INFERIOR +1179 21 180 180 255 +CTX-LH-S_TEMPORAL_SUPERIOR +1180 223 220 60 255 +CTX-LH-S_TEMPORAL_TRANSVERSE +1181 221 60 60 255 +CTX-RH-UNKNOWN +2100 0 0 0 255 +CTX-RH-CORPUS_CALLOSUM +2101 50 50 50 255 +CTX-RH-G_AND_S_INSULA_ONLY_AVERAGE +2102 180 20 30 255 +CTX-RH-G_CINGULATE-ISTHMUS +2103 60 25 25 255 +CTX-RH-G_CINGULATE-MAIN_PART +2104 25 60 60 255 +CTX-RH-G_CUNEUS +2105 180 20 20 255 +CTX-RH-G_FRONTAL_INF-OPERCULAR_PART +2106 220 20 100 255 +CTX-RH-G_FRONTAL_INF-ORBITAL_PART +2107 140 60 60 255 +CTX-RH-G_FRONTAL_INF-TRIANGULAR_PART +2108 180 220 140 255 +CTX-RH-G_FRONTAL_MIDDLE +2109 140 100 180 255 +CTX-RH-G_FRONTAL_SUPERIOR +2110 180 20 140 255 +CTX-RH-G_FRONTOMARGINAL +2111 140 20 140 255 +CTX-RH-G_INSULAR_LONG +2112 21 10 10 255 +CTX-RH-G_INSULAR_SHORT +2113 225 140 140 255 +CTX-RH-G_AND_S_OCCIPITAL_INFERIOR +2114 23 60 180 255 +CTX-RH-G_OCCIPITAL_MIDDLE +2115 180 60 180 255 +CTX-RH-G_OCCIPITAL_SUPERIOR +2116 20 220 60 255 +CTX-RH-G_OCCIPIT-TEMP_LAT-OR_FUSIFORM +2117 60 20 140 255 +CTX-RH-G_OCCIPIT-TEMP_MED-LINGUAL_PART +2118 220 180 140 255 +CTX-RH-G_OCCIPIT-TEMP_MED-PARAHIPPOCAMPAL_PART +2119 65 100 20 255 +CTX-RH-G_ORBITAL +2120 220 60 20 255 +CTX-RH-G_PARACENTRAL +2121 60 100 60 255 +CTX-RH-G_PARIETAL_INFERIOR-ANGULAR_PART +2122 20 60 220 255 +CTX-RH-G_PARIETAL_INFERIOR-SUPRAMARGINAL_PART +2123 100 100 60 255 +CTX-RH-G_PARIETAL_SUPERIOR +2124 220 180 220 255 +CTX-RH-G_POSTCENTRAL +2125 20 180 140 255 +CTX-RH-G_PRECENTRAL +2126 60 140 180 255 +CTX-RH-G_PRECUNEUS +2127 25 20 140 255 +CTX-RH-G_RECTUS +2128 20 60 100 255 
+CTX-RH-G_SUBCALLOSAL +2129 60 220 20 255 +CTX-RH-G_SUBCENTRAL +2130 60 20 220 255 +CTX-RH-G_TEMPORAL_INFERIOR +2131 220 220 100 255 +CTX-RH-G_TEMPORAL_MIDDLE +2132 180 60 60 255 +CTX-RH-G_TEMP_SUP-G_TEMP_TRANSV_AND_INTERM_S +2133 60 60 220 255 +CTX-RH-G_TEMP_SUP-LATERAL_ASPECT +2134 220 60 220 255 +CTX-RH-G_TEMP_SUP-PLANUM_POLARE +2135 65 220 60 255 +CTX-RH-G_TEMP_SUP-PLANUM_TEMPOLARE +2136 25 140 20 255 +CTX-RH-G_AND_S_TRANSVERSE_FRONTOPOLAR +2137 13 0 250 255 +CTX-RH-LAT_FISSURE-ANT_SGT-RAMUS_HORIZONTAL +2138 61 20 220 255 +CTX-RH-LAT_FISSURE-ANT_SGT-RAMUS_VERTICAL +2139 61 20 60 255 +CTX-RH-LAT_FISSURE-POST_SGT +2140 61 60 100 255 +CTX-RH-MEDIAL_WALL +2141 25 25 25 255 +CTX-RH-POLE_OCCIPITAL +2142 140 20 60 255 +CTX-RH-POLE_TEMPORAL +2143 220 180 20 255 +CTX-RH-S_CALCARINE +2144 63 180 180 255 +CTX-RH-S_CENTRAL +2145 221 20 10 255 +CTX-RH-S_CENTRAL_INSULA +2146 21 220 20 255 +CTX-RH-S_CINGULATE-MAIN_PART_AND_INTRACINGULATE +2147 183 100 20 255 +CTX-RH-S_CINGULATE-MARGINALIS_PART +2148 221 20 100 255 +CTX-RH-S_CIRCULAR_INSULA_ANTERIOR +2149 221 60 140 255 +CTX-RH-S_CIRCULAR_INSULA_INFERIOR +2150 221 20 220 255 +CTX-RH-S_CIRCULAR_INSULA_SUPERIOR +2151 61 220 220 255 +CTX-RH-S_COLLATERAL_TRANSVERSE_ANT +2152 100 200 200 255 +CTX-RH-S_COLLATERAL_TRANSVERSE_POST +2153 10 200 200 255 +CTX-RH-S_FRONTAL_INFERIOR +2154 221 220 20 255 +CTX-RH-S_FRONTAL_MIDDLE +2155 141 20 100 255 +CTX-RH-S_FRONTAL_SUPERIOR +2156 61 220 100 255 +CTX-RH-S_FRONTOMARGINAL +2157 21 220 60 255 +CTX-RH-S_INTERMEDIUS_PRIMUS-JENSEN +2158 141 60 20 255 +CTX-RH-S_INTRAPARIETAL-AND_PARIETAL_TRANSVERSE +2159 143 20 220 255 +CTX-RH-S_OCCIPITAL_ANTERIOR +2160 61 20 180 255 +CTX-RH-S_OCCIPITAL_MIDDLE_AND_LUNATUS +2161 101 60 220 255 +CTX-RH-S_OCCIPITAL_SUPERIOR_AND_TRANSVERSALIS +2162 21 20 140 255 +CTX-RH-S_OCCIPITO-TEMPORAL_LATERAL +2163 221 140 20 255 +CTX-RH-S_OCCIPITO-TEMPORAL_MEDIAL_AND_S_LINGUAL +2164 141 100 220 255 +CTX-RH-S_ORBITAL-H_SHAPPED +2165 101 20 20 255 +CTX-RH-S_ORBITAL_LATERAL +2166 
221 100 20 255 +CTX-RH-S_ORBITAL_MEDIAL-OR_OLFACTORY +2167 181 200 20 255 +CTX-RH-S_PARACENTRAL +2168 21 180 140 255 +CTX-RH-S_PARIETO_OCCIPITAL +2169 101 100 180 255 +CTX-RH-S_PERICALLOSAL +2170 181 220 20 255 +CTX-RH-S_POSTCENTRAL +2171 21 140 200 255 +CTX-RH-S_PRECENTRAL-INFERIOR-PART +2172 21 20 240 255 +CTX-RH-S_PRECENTRAL-SUPERIOR-PART +2173 21 20 200 255 +CTX-RH-S_SUBCENTRAL_ANT +2174 61 180 60 255 +CTX-RH-S_SUBCENTRAL_POST +2175 61 180 250 255 +CTX-RH-S_SUBORBITAL +2176 21 20 60 255 +CTX-RH-S_SUBPARIETAL +2177 101 60 60 255 +CTX-RH-S_SUPRACINGULATE +2178 21 220 220 255 +CTX-RH-S_TEMPORAL_INFERIOR +2179 21 180 180 255 +CTX-RH-S_TEMPORAL_SUPERIOR +2180 223 220 60 255 +CTX-RH-S_TEMPORAL_TRANSVERSE +2181 221 60 60 255 +CTX-RH-G_CINGULATE-CAUDAL_ACC +2200 25 60 61 255 +CTX-RH-G_CINGULATE-ROSTRAL_ACC +2201 25 90 60 255 +CTX-RH-G_CINGULATE-POSTERIOR +2202 25 120 60 255 +CTX-RH-S_CINGULATE-CAUDAL_ACC +2205 25 150 60 255 +CTX-RH-S_CINGULATE-ROSTRAL_ACC +2206 25 180 60 255 +CTX-RH-S_CINGULATE-POSTERIOR +2207 25 210 60 255 +CTX-RH-S_PERICALLOSAL-CAUDAL +2210 25 150 90 255 +CTX-RH-S_PERICALLOSAL-ROSTRAL +2211 25 180 90 255 +CTX-RH-S_PERICALLOSAL-POSTERIOR +2212 25 210 90 255 +WM-LH-UNKNOWN +3100 0 0 0 255 +WM-LH-CORPUS_CALLOSUM +3101 50 50 50 255 +WM-LH-G_AND_S_INSULA_ONLY_AVERAGE +3102 180 20 30 255 +WM-LH-G_CINGULATE-ISTHMUS +3103 60 25 25 255 +WM-LH-G_CINGULATE-MAIN_PART +3104 25 60 60 255 +WM-LH-G_CUNEUS +3105 180 20 20 255 +WM-LH-G_FRONTAL_INF-OPERCULAR_PART +3106 220 20 100 255 +WM-LH-G_FRONTAL_INF-ORBITAL_PART +3107 140 60 60 255 +WM-LH-G_FRONTAL_INF-TRIANGULAR_PART +3108 180 220 140 255 +WM-LH-G_FRONTAL_MIDDLE +3109 140 100 180 255 +WM-LH-G_FRONTAL_SUPERIOR +3110 180 20 140 255 +WM-LH-G_FRONTOMARGINAL +3111 140 20 140 255 +WM-LH-G_INSULAR_LONG +3112 21 10 10 255 +WM-LH-G_INSULAR_SHORT +3113 225 140 140 255 +WM-LH-G_AND_S_OCCIPITAL_INFERIOR +3114 23 60 180 255 +WM-LH-G_OCCIPITAL_MIDDLE +3115 180 60 180 255 +WM-LH-G_OCCIPITAL_SUPERIOR +3116 20 220 60 255 
+WM-LH-G_OCCIPIT-TEMP_LAT-OR_FUSIFORM +3117 60 20 140 255 +WM-LH-G_OCCIPIT-TEMP_MED-LINGUAL_PART +3118 220 180 140 255 +WM-LH-G_OCCIPIT-TEMP_MED-PARAHIPPOCAMPAL_PART +3119 65 100 20 255 +WM-LH-G_ORBITAL +3120 220 60 20 255 +WM-LH-G_PARACENTRAL +3121 60 100 60 255 +WM-LH-G_PARIETAL_INFERIOR-ANGULAR_PART +3122 20 60 220 255 +WM-LH-G_PARIETAL_INFERIOR-SUPRAMARGINAL_PART +3123 100 100 60 255 +WM-LH-G_PARIETAL_SUPERIOR +3124 220 180 220 255 +WM-LH-G_POSTCENTRAL +3125 20 180 140 255 +WM-LH-G_PRECENTRAL +3126 60 140 180 255 +WM-LH-G_PRECUNEUS +3127 25 20 140 255 +WM-LH-G_RECTUS +3128 20 60 100 255 +WM-LH-G_SUBCALLOSAL +3129 60 220 20 255 +WM-LH-G_SUBCENTRAL +3130 60 20 220 255 +WM-LH-G_TEMPORAL_INFERIOR +3131 220 220 100 255 +WM-LH-G_TEMPORAL_MIDDLE +3132 180 60 60 255 +WM-LH-G_TEMP_SUP-G_TEMP_TRANSV_AND_INTERM_S +3133 60 60 220 255 +WM-LH-G_TEMP_SUP-LATERAL_ASPECT +3134 220 60 220 255 +WM-LH-G_TEMP_SUP-PLANUM_POLARE +3135 65 220 60 255 +WM-LH-G_TEMP_SUP-PLANUM_TEMPOLARE +3136 25 140 20 255 +WM-LH-G_AND_S_TRANSVERSE_FRONTOPOLAR +3137 13 0 250 255 +WM-LH-LAT_FISSURE-ANT_SGT-RAMUS_HORIZONTAL +3138 61 20 220 255 +WM-LH-LAT_FISSURE-ANT_SGT-RAMUS_VERTICAL +3139 61 20 60 255 +WM-LH-LAT_FISSURE-POST_SGT +3140 61 60 100 255 +WM-LH-MEDIAL_WALL +3141 25 25 25 255 +WM-LH-POLE_OCCIPITAL +3142 140 20 60 255 +WM-LH-POLE_TEMPORAL +3143 220 180 20 255 +WM-LH-S_CALCARINE +3144 63 180 180 255 +WM-LH-S_CENTRAL +3145 221 20 10 255 +WM-LH-S_CENTRAL_INSULA +3146 21 220 20 255 +WM-LH-S_CINGULATE-MAIN_PART_AND_INTRACINGULATE +3147 183 100 20 255 +WM-LH-S_CINGULATE-MARGINALIS_PART +3148 221 20 100 255 +WM-LH-S_CIRCULAR_INSULA_ANTERIOR +3149 221 60 140 255 +WM-LH-S_CIRCULAR_INSULA_INFERIOR +3150 221 20 220 255 +WM-LH-S_CIRCULAR_INSULA_SUPERIOR +3151 61 220 220 255 +WM-LH-S_COLLATERAL_TRANSVERSE_ANT +3152 100 200 200 255 +WM-LH-S_COLLATERAL_TRANSVERSE_POST +3153 10 200 200 255 +WM-LH-S_FRONTAL_INFERIOR +3154 221 220 20 255 +WM-LH-S_FRONTAL_MIDDLE +3155 141 20 100 255 +WM-LH-S_FRONTAL_SUPERIOR +3156 
61 220 100 255 +WM-LH-S_FRONTOMARGINAL +3157 21 220 60 255 +WM-LH-S_INTERMEDIUS_PRIMUS-JENSEN +3158 141 60 20 255 +WM-LH-S_INTRAPARIETAL-AND_PARIETAL_TRANSVERSE +3159 143 20 220 255 +WM-LH-S_OCCIPITAL_ANTERIOR +3160 61 20 180 255 +WM-LH-S_OCCIPITAL_MIDDLE_AND_LUNATUS +3161 101 60 220 255 +WM-LH-S_OCCIPITAL_SUPERIOR_AND_TRANSVERSALIS +3162 21 20 140 255 +WM-LH-S_OCCIPITO-TEMPORAL_LATERAL +3163 221 140 20 255 +WM-LH-S_OCCIPITO-TEMPORAL_MEDIAL_AND_S_LINGUAL +3164 141 100 220 255 +WM-LH-S_ORBITAL-H_SHAPPED +3165 101 20 20 255 +WM-LH-S_ORBITAL_LATERAL +3166 221 100 20 255 +WM-LH-S_ORBITAL_MEDIAL-OR_OLFACTORY +3167 181 200 20 255 +WM-LH-S_PARACENTRAL +3168 21 180 140 255 +WM-LH-S_PARIETO_OCCIPITAL +3169 101 100 180 255 +WM-LH-S_PERICALLOSAL +3170 181 220 20 255 +WM-LH-S_POSTCENTRAL +3171 21 140 200 255 +WM-LH-S_PRECENTRAL-INFERIOR-PART +3172 21 20 240 255 +WM-LH-S_PRECENTRAL-SUPERIOR-PART +3173 21 20 200 255 +WM-LH-S_SUBCENTRAL_ANT +3174 61 180 60 255 +WM-LH-S_SUBCENTRAL_POST +3175 61 180 250 255 +WM-LH-S_SUBORBITAL +3176 21 20 60 255 +WM-LH-S_SUBPARIETAL +3177 101 60 60 255 +WM-LH-S_SUPRACINGULATE +3178 21 220 220 255 +WM-LH-S_TEMPORAL_INFERIOR +3179 21 180 180 255 +WM-LH-S_TEMPORAL_SUPERIOR +3180 223 220 60 255 +WM-LH-S_TEMPORAL_TRANSVERSE +3181 221 60 60 255 +WM-RH-UNKNOWN +4100 0 0 0 255 +WM-RH-CORPUS_CALLOSUM +4101 50 50 50 255 +WM-RH-G_AND_S_INSULA_ONLY_AVERAGE +4102 180 20 30 255 +WM-RH-G_CINGULATE-ISTHMUS +4103 60 25 25 255 +WM-RH-G_CINGULATE-MAIN_PART +4104 25 60 60 255 +WM-RH-G_CUNEUS +4105 180 20 20 255 +WM-RH-G_FRONTAL_INF-OPERCULAR_PART +4106 220 20 100 255 +WM-RH-G_FRONTAL_INF-ORBITAL_PART +4107 140 60 60 255 +WM-RH-G_FRONTAL_INF-TRIANGULAR_PART +4108 180 220 140 255 +WM-RH-G_FRONTAL_MIDDLE +4109 140 100 180 255 +WM-RH-G_FRONTAL_SUPERIOR +4110 180 20 140 255 +WM-RH-G_FRONTOMARGINAL +4111 140 20 140 255 +WM-RH-G_INSULAR_LONG +4112 21 10 10 255 +WM-RH-G_INSULAR_SHORT +4113 225 140 140 255 +WM-RH-G_AND_S_OCCIPITAL_INFERIOR +4114 23 60 180 255 
+WM-RH-G_OCCIPITAL_MIDDLE +4115 180 60 180 255 +WM-RH-G_OCCIPITAL_SUPERIOR +4116 20 220 60 255 +WM-RH-G_OCCIPIT-TEMP_LAT-OR_FUSIFORM +4117 60 20 140 255 +WM-RH-G_OCCIPIT-TEMP_MED-LINGUAL_PART +4118 220 180 140 255 +WM-RH-G_OCCIPIT-TEMP_MED-PARAHIPPOCAMPAL_PART +4119 65 100 20 255 +WM-RH-G_ORBITAL +4120 220 60 20 255 +WM-RH-G_PARACENTRAL +4121 60 100 60 255 +WM-RH-G_PARIETAL_INFERIOR-ANGULAR_PART +4122 20 60 220 255 +WM-RH-G_PARIETAL_INFERIOR-SUPRAMARGINAL_PART +4123 100 100 60 255 +WM-RH-G_PARIETAL_SUPERIOR +4124 220 180 220 255 +WM-RH-G_POSTCENTRAL +4125 20 180 140 255 +WM-RH-G_PRECENTRAL +4126 60 140 180 255 +WM-RH-G_PRECUNEUS +4127 25 20 140 255 +WM-RH-G_RECTUS +4128 20 60 100 255 +WM-RH-G_SUBCALLOSAL +4129 60 220 20 255 +WM-RH-G_SUBCENTRAL +4130 60 20 220 255 +WM-RH-G_TEMPORAL_INFERIOR +4131 220 220 100 255 +WM-RH-G_TEMPORAL_MIDDLE +4132 180 60 60 255 +WM-RH-G_TEMP_SUP-G_TEMP_TRANSV_AND_INTERM_S +4133 60 60 220 255 +WM-RH-G_TEMP_SUP-LATERAL_ASPECT +4134 220 60 220 255 +WM-RH-G_TEMP_SUP-PLANUM_POLARE +4135 65 220 60 255 +WM-RH-G_TEMP_SUP-PLANUM_TEMPOLARE +4136 25 140 20 255 +WM-RH-G_AND_S_TRANSVERSE_FRONTOPOLAR +4137 13 0 250 255 +WM-RH-LAT_FISSURE-ANT_SGT-RAMUS_HORIZONTAL +4138 61 20 220 255 +WM-RH-LAT_FISSURE-ANT_SGT-RAMUS_VERTICAL +4139 61 20 60 255 +WM-RH-LAT_FISSURE-POST_SGT +4140 61 60 100 255 +WM-RH-MEDIAL_WALL +4141 25 25 25 255 +WM-RH-POLE_OCCIPITAL +4142 140 20 60 255 +WM-RH-POLE_TEMPORAL +4143 220 180 20 255 +WM-RH-S_CALCARINE +4144 63 180 180 255 +WM-RH-S_CENTRAL +4145 221 20 10 255 +WM-RH-S_CENTRAL_INSULA +4146 21 220 20 255 +WM-RH-S_CINGULATE-MAIN_PART_AND_INTRACINGULATE +4147 183 100 20 255 +WM-RH-S_CINGULATE-MARGINALIS_PART +4148 221 20 100 255 +WM-RH-S_CIRCULAR_INSULA_ANTERIOR +4149 221 60 140 255 +WM-RH-S_CIRCULAR_INSULA_INFERIOR +4150 221 20 220 255 +WM-RH-S_CIRCULAR_INSULA_SUPERIOR +4151 61 220 220 255 +WM-RH-S_COLLATERAL_TRANSVERSE_ANT +4152 100 200 200 255 +WM-RH-S_COLLATERAL_TRANSVERSE_POST +4153 10 200 200 255 +WM-RH-S_FRONTAL_INFERIOR 
+4154 221 220 20 255 +WM-RH-S_FRONTAL_MIDDLE +4155 141 20 100 255 +WM-RH-S_FRONTAL_SUPERIOR +4156 61 220 100 255 +WM-RH-S_FRONTOMARGINAL +4157 21 220 60 255 +WM-RH-S_INTERMEDIUS_PRIMUS-JENSEN +4158 141 60 20 255 +WM-RH-S_INTRAPARIETAL-AND_PARIETAL_TRANSVERSE +4159 143 20 220 255 +WM-RH-S_OCCIPITAL_ANTERIOR +4160 61 20 180 255 +WM-RH-S_OCCIPITAL_MIDDLE_AND_LUNATUS +4161 101 60 220 255 +WM-RH-S_OCCIPITAL_SUPERIOR_AND_TRANSVERSALIS +4162 21 20 140 255 +WM-RH-S_OCCIPITO-TEMPORAL_LATERAL +4163 221 140 20 255 +WM-RH-S_OCCIPITO-TEMPORAL_MEDIAL_AND_S_LINGUAL +4164 141 100 220 255 +WM-RH-S_ORBITAL-H_SHAPPED +4165 101 20 20 255 +WM-RH-S_ORBITAL_LATERAL +4166 221 100 20 255 +WM-RH-S_ORBITAL_MEDIAL-OR_OLFACTORY +4167 181 200 20 255 +WM-RH-S_PARACENTRAL +4168 21 180 140 255 +WM-RH-S_PARIETO_OCCIPITAL +4169 101 100 180 255 +WM-RH-S_PERICALLOSAL +4170 181 220 20 255 +WM-RH-S_POSTCENTRAL +4171 21 140 200 255 +WM-RH-S_PRECENTRAL-INFERIOR-PART +4172 21 20 240 255 +WM-RH-S_PRECENTRAL-SUPERIOR-PART +4173 21 20 200 255 +WM-RH-S_SUBCENTRAL_ANT +4174 61 180 60 255 +WM-RH-S_SUBCENTRAL_POST +4175 61 180 250 255 +WM-RH-S_SUBORBITAL +4176 21 20 60 255 +WM-RH-S_SUBPARIETAL +4177 101 60 60 255 +WM-RH-S_SUPRACINGULATE +4178 21 220 220 255 +WM-RH-S_TEMPORAL_INFERIOR +4179 21 180 180 255 +WM-RH-S_TEMPORAL_SUPERIOR +4180 223 220 60 255 +WM-RH-S_TEMPORAL_TRANSVERSE +4181 221 60 60 255 +LEFT-UNSEGMENTEDWHITEMATTER +5001 20 30 40 255 +RIGHT-UNSEGMENTEDWHITEMATTER +5002 20 30 40 255 +FMAJOR +5100 204 102 102 255 +FMINOR +5101 204 102 102 255 +LH.ATR +5102 255 255 102 255 +LH.CAB +5103 153 204 0 255 +LH.CCG +5104 0 153 153 255 +LH.CST +5105 204 153 255 255 +LH.ILF +5106 255 153 51 255 +LH.SLFP +5107 204 204 204 255 +LH.SLFT +5108 153 255 255 255 +LH.UNC +5109 102 153 255 255 +RH.ATR +5110 255 255 102 255 +RH.CAB +5111 153 204 0 255 +RH.CCG +5112 0 153 153 255 +RH.CST +5113 204 153 255 255 +RH.ILF +5114 255 153 51 255 +RH.SLFP +5115 204 204 204 255 +RH.SLFT +5116 153 255 255 255 +RH.UNC +5117 102 153 
255 255 +CC-FORCEPSMAJOR +5200 204 102 102 255 +CC-FORCEPSMINOR +5201 204 102 102 255 +LANTTHALRADIATION +5202 255 255 102 255 +LCINGULUMANGBUNDLE +5203 153 204 0 255 +LCINGULUMCINGGYRUS +5204 0 153 153 255 +LCORTICOSPINALTRACT +5205 204 153 255 255 +LINFLONGFAS +5206 255 153 51 255 +LSUPLONGFASPARIETAL +5207 204 204 204 255 +LSUPLONGFASTEMPORAL +5208 153 255 255 255 +LUNCINATEFAS +5209 102 153 255 255 +RANTTHALRADIATION +5210 255 255 102 255 +RCINGULUMANGBUNDLE +5211 153 204 0 255 +RCINGULUMCINGGYRUS +5212 0 153 153 255 +RCORTICOSPINALTRACT +5213 204 153 255 255 +RINFLONGFAS +5214 255 153 51 255 +RSUPLONGFASPARIETAL +5215 204 204 204 255 +RSUPLONGFASTEMPORAL +5216 153 255 255 255 +RUNCINATEFAS +5217 102 153 255 255 +CST-ORIG +6000 0 255 0 255 +CST-HAMMER +6001 255 255 0 255 +CST-CVS +6002 0 255 255 255 +CST-FLIRT +6003 0 0 255 255 +LEFT-SLF1 +6010 236 16 231 255 +RIGHT-SLF1 +6020 237 18 232 255 +LEFT-SLF3 +6030 236 13 227 255 +RIGHT-SLF3 +6040 236 17 228 255 +LEFT-CST +6050 1 255 1 255 +RIGHT-CST +6060 2 255 1 255 +LEFT-SLF2 +6070 236 14 230 255 +RIGHT-SLF2 +6080 237 14 230 255 +LATERAL-NUCLEUS +7001 72 132 181 255 +BASOLATERAL-NUCLEUS +7002 243 243 243 255 +BASAL-NUCLEUS +7003 207 63 79 255 +CENTROMEDIAL-NUCLEUS +7004 121 20 135 255 +CENTRAL-NUCLEUS +7005 197 60 248 255 +MEDIAL-NUCLEUS +7006 2 149 2 255 +CORTICAL-NUCLEUS +7007 221 249 166 255 +ACCESSORY-BASAL-NUCLEUS +7008 232 146 35 255 +CORTICOAMYGDALOID-TRANSITIO +7009 20 60 120 255 +ANTERIOR-AMYGDALOID-AREA-AAA +7010 250 250 0 255 +FUSION-AMYGDALA-HP-FAH +7011 122 187 222 255 +HIPPOCAMPAL-AMYGDALA-TRANSITION-HATA +7012 237 12 177 255 +ENDOPIRIFORM-NUCLEUS +7013 10 49 255 255 +LATERAL-NUCLEUS-OLFACTORY-TRACT +7014 205 184 144 255 +PARALAMINAR-NUCLEUS +7015 45 205 165 255 +INTERCALATED-NUCLEUS +7016 117 160 175 255 +PREPIRIFORM-CORTEX +7017 221 217 21 255 +PERIAMYGDALOID-CORTEX +7018 20 60 120 255 +ENVELOPE-AMYGDALA +7019 141 21 100 255 +EXTRANUCLEAR-AMYDALA +7020 225 140 141 255 +BRAINSTEM-INFERIOR-COLLICULUS 
+7100 42 201 168 255 +BRAINSTEM-COCHLEAR-NUCLEUS +7101 168 104 162 255 +THALAMUS-ANTERIOR +8001 74 130 181 255 +THALAMUS-VENTRAL-ANTERIOR +8002 242 241 240 255 +THALAMUS-LATERAL-DORSAL +8003 206 65 78 255 +THALAMUS-LATERAL-POSTERIOR +8004 120 21 133 255 +THALAMUS-VENTRAL-LATERAL +8005 195 61 246 255 +THALAMUS-VENTRAL-POSTERIOR-MEDIAL +8006 3 147 6 255 +THALAMUS-VENTRAL-POSTERIOR-LATERAL +8007 220 251 163 255 +THALAMUS-INTRALAMINAR +8008 232 146 33 255 +THALAMUS-CENTROMEDIAN +8009 4 114 14 255 +THALAMUS-MEDIODORSAL +8010 121 184 220 255 +THALAMUS-MEDIAL +8011 235 11 175 255 +THALAMUS-PULVINAR +8012 12 46 250 255 +THALAMUS-LATERAL-GENICULATE +8013 203 182 143 255 +THALAMUS-MEDIAL-GENICULATE +8014 42 204 167 255 +CTX-LH-PREFRONTAL +9000 30 5 30 255 +CTX-LH-PRIMARY-MOTOR +9001 30 100 45 255 +CTX-LH-PREMOTOR +9002 130 100 165 255 +CTX-LH-TEMPORAL +9003 105 25 5 255 +CTX-LH-POSTERIOR-PARIETAL +9004 125 70 55 255 +CTX-LH-PRIM-SEC-SOMATOSENSORY +9005 225 20 105 255 +CTX-LH-OCCIPITAL +9006 225 20 15 255 +CTX-RH-PREFRONTAL +9500 30 55 30 255 +CTX-RH-PRIMARY-MOTOR +9501 30 150 45 255 +CTX-RH-PREMOTOR +9502 130 150 165 255 +CTX-RH-TEMPORAL +9503 105 75 5 255 +CTX-RH-POSTERIOR-PARIETAL +9504 125 120 55 255 +CTX-RH-PRIM-SEC-SOMATOSENSORY +9505 225 70 105 255 +CTX-RH-OCCIPITAL +9506 225 70 15 255 +CTX_LH_UNKNOWN +11100 0 0 0 255 +CTX_LH_G_AND_S_FRONTOMARGIN +11101 23 220 60 255 +CTX_LH_G_AND_S_OCCIPITAL_INF +11102 23 60 180 255 +CTX_LH_G_AND_S_PARACENTRAL +11103 63 100 60 255 +CTX_LH_G_AND_S_SUBCENTRAL +11104 63 20 220 255 +CTX_LH_G_AND_S_TRANSV_FRONTOPOL +11105 13 0 250 255 +CTX_LH_G_AND_S_CINGUL-ANT +11106 26 60 0 255 +CTX_LH_G_AND_S_CINGUL-MID-ANT +11107 26 60 75 255 +CTX_LH_G_AND_S_CINGUL-MID-POST +11108 26 60 150 255 +CTX_LH_G_CINGUL-POST-DORSAL +11109 25 60 250 255 +CTX_LH_G_CINGUL-POST-VENTRAL +11110 60 25 25 255 +CTX_LH_G_CUNEUS +11111 180 20 20 255 +CTX_LH_G_FRONT_INF-OPERCULAR +11112 220 20 100 255 +CTX_LH_G_FRONT_INF-ORBITAL +11113 140 60 60 255 
+CTX_LH_G_FRONT_INF-TRIANGUL +11114 180 220 140 255 +CTX_LH_G_FRONT_MIDDLE +11115 140 100 180 255 +CTX_LH_G_FRONT_SUP +11116 180 20 140 255 +CTX_LH_G_INS_LG_AND_S_CENT_INS +11117 23 10 10 255 +CTX_LH_G_INSULAR_SHORT +11118 225 140 140 255 +CTX_LH_G_OCCIPITAL_MIDDLE +11119 180 60 180 255 +CTX_LH_G_OCCIPITAL_SUP +11120 20 220 60 255 +CTX_LH_G_OC-TEMP_LAT-FUSIFOR +11121 60 20 140 255 +CTX_LH_G_OC-TEMP_MED-LINGUAL +11122 220 180 140 255 +CTX_LH_G_OC-TEMP_MED-PARAHIP +11123 65 100 20 255 +CTX_LH_G_ORBITAL +11124 220 60 20 255 +CTX_LH_G_PARIET_INF-ANGULAR +11125 20 60 220 255 +CTX_LH_G_PARIET_INF-SUPRAMAR +11126 100 100 60 255 +CTX_LH_G_PARIETAL_SUP +11127 220 180 220 255 +CTX_LH_G_POSTCENTRAL +11128 20 180 140 255 +CTX_LH_G_PRECENTRAL +11129 60 140 180 255 +CTX_LH_G_PRECUNEUS +11130 25 20 140 255 +CTX_LH_G_RECTUS +11131 20 60 100 255 +CTX_LH_G_SUBCALLOSAL +11132 60 220 20 255 +CTX_LH_G_TEMP_SUP-G_T_TRANSV +11133 60 60 220 255 +CTX_LH_G_TEMP_SUP-LATERAL +11134 220 60 220 255 +CTX_LH_G_TEMP_SUP-PLAN_POLAR +11135 65 220 60 255 +CTX_LH_G_TEMP_SUP-PLAN_TEMPO +11136 25 140 20 255 +CTX_LH_G_TEMPORAL_INF +11137 220 220 100 255 +CTX_LH_G_TEMPORAL_MIDDLE +11138 180 60 60 255 +CTX_LH_LAT_FIS-ANT-HORIZONT +11139 61 20 220 255 +CTX_LH_LAT_FIS-ANT-VERTICAL +11140 61 20 60 255 +CTX_LH_LAT_FIS-POST +11141 61 60 100 255 +CTX_LH_MEDIAL_WALL +11142 25 25 25 255 +CTX_LH_POLE_OCCIPITAL +11143 140 20 60 255 +CTX_LH_POLE_TEMPORAL +11144 220 180 20 255 +CTX_LH_S_CALCARINE +11145 63 180 180 255 +CTX_LH_S_CENTRAL +11146 221 20 10 255 +CTX_LH_S_CINGUL-MARGINALIS +11147 221 20 100 255 +CTX_LH_S_CIRCULAR_INSULA_ANT +11148 221 60 140 255 +CTX_LH_S_CIRCULAR_INSULA_INF +11149 221 20 220 255 +CTX_LH_S_CIRCULAR_INSULA_SUP +11150 61 220 220 255 +CTX_LH_S_COLLAT_TRANSV_ANT +11151 100 200 200 255 +CTX_LH_S_COLLAT_TRANSV_POST +11152 10 200 200 255 +CTX_LH_S_FRONT_INF +11153 221 220 20 255 +CTX_LH_S_FRONT_MIDDLE +11154 141 20 100 255 +CTX_LH_S_FRONT_SUP +11155 61 220 100 255 +CTX_LH_S_INTERM_PRIM-JENSEN 
+11156 141 60 20 255 +CTX_LH_S_INTRAPARIET_AND_P_TRANS +11157 143 20 220 255 +CTX_LH_S_OC_MIDDLE_AND_LUNATUS +11158 101 60 220 255 +CTX_LH_S_OC_SUP_AND_TRANSVERSAL +11159 21 20 140 255 +CTX_LH_S_OCCIPITAL_ANT +11160 61 20 180 255 +CTX_LH_S_OC-TEMP_LAT +11161 221 140 20 255 +CTX_LH_S_OC-TEMP_MED_AND_LINGUAL +11162 141 100 220 255 +CTX_LH_S_ORBITAL_LATERAL +11163 221 100 20 255 +CTX_LH_S_ORBITAL_MED-OLFACT +11164 181 200 20 255 +CTX_LH_S_ORBITAL-H_SHAPED +11165 101 20 20 255 +CTX_LH_S_PARIETO_OCCIPITAL +11166 101 100 180 255 +CTX_LH_S_PERICALLOSAL +11167 181 220 20 255 +CTX_LH_S_POSTCENTRAL +11168 21 140 200 255 +CTX_LH_S_PRECENTRAL-INF-PART +11169 21 20 240 255 +CTX_LH_S_PRECENTRAL-SUP-PART +11170 21 20 200 255 +CTX_LH_S_SUBORBITAL +11171 21 20 60 255 +CTX_LH_S_SUBPARIETAL +11172 101 60 60 255 +CTX_LH_S_TEMPORAL_INF +11173 21 180 180 255 +CTX_LH_S_TEMPORAL_SUP +11174 223 220 60 255 +CTX_LH_S_TEMPORAL_TRANSVERSE +11175 221 60 60 255 +CTX_RH_UNKNOWN +12100 0 0 0 255 +CTX_RH_G_AND_S_FRONTOMARGIN +12101 23 220 60 255 +CTX_RH_G_AND_S_OCCIPITAL_INF +12102 23 60 180 255 +CTX_RH_G_AND_S_PARACENTRAL +12103 63 100 60 255 +CTX_RH_G_AND_S_SUBCENTRAL +12104 63 20 220 255 +CTX_RH_G_AND_S_TRANSV_FRONTOPOL +12105 13 0 250 255 +CTX_RH_G_AND_S_CINGUL-ANT +12106 26 60 0 255 +CTX_RH_G_AND_S_CINGUL-MID-ANT +12107 26 60 75 255 +CTX_RH_G_AND_S_CINGUL-MID-POST +12108 26 60 150 255 +CTX_RH_G_CINGUL-POST-DORSAL +12109 25 60 250 255 +CTX_RH_G_CINGUL-POST-VENTRAL +12110 60 25 25 255 +CTX_RH_G_CUNEUS +12111 180 20 20 255 +CTX_RH_G_FRONT_INF-OPERCULAR +12112 220 20 100 255 +CTX_RH_G_FRONT_INF-ORBITAL +12113 140 60 60 255 +CTX_RH_G_FRONT_INF-TRIANGUL +12114 180 220 140 255 +CTX_RH_G_FRONT_MIDDLE +12115 140 100 180 255 +CTX_RH_G_FRONT_SUP +12116 180 20 140 255 +CTX_RH_G_INS_LG_AND_S_CENT_INS +12117 23 10 10 255 +CTX_RH_G_INSULAR_SHORT +12118 225 140 140 255 +CTX_RH_G_OCCIPITAL_MIDDLE +12119 180 60 180 255 +CTX_RH_G_OCCIPITAL_SUP +12120 20 220 60 255 +CTX_RH_G_OC-TEMP_LAT-FUSIFOR +12121 60 20 140 
255 +CTX_RH_G_OC-TEMP_MED-LINGUAL +12122 220 180 140 255 +CTX_RH_G_OC-TEMP_MED-PARAHIP +12123 65 100 20 255 +CTX_RH_G_ORBITAL +12124 220 60 20 255 +CTX_RH_G_PARIET_INF-ANGULAR +12125 20 60 220 255 +CTX_RH_G_PARIET_INF-SUPRAMAR +12126 100 100 60 255 +CTX_RH_G_PARIETAL_SUP +12127 220 180 220 255 +CTX_RH_G_POSTCENTRAL +12128 20 180 140 255 +CTX_RH_G_PRECENTRAL +12129 60 140 180 255 +CTX_RH_G_PRECUNEUS +12130 25 20 140 255 +CTX_RH_G_RECTUS +12131 20 60 100 255 +CTX_RH_G_SUBCALLOSAL +12132 60 220 20 255 +CTX_RH_G_TEMP_SUP-G_T_TRANSV +12133 60 60 220 255 +CTX_RH_G_TEMP_SUP-LATERAL +12134 220 60 220 255 +CTX_RH_G_TEMP_SUP-PLAN_POLAR +12135 65 220 60 255 +CTX_RH_G_TEMP_SUP-PLAN_TEMPO +12136 25 140 20 255 +CTX_RH_G_TEMPORAL_INF +12137 220 220 100 255 +CTX_RH_G_TEMPORAL_MIDDLE +12138 180 60 60 255 +CTX_RH_LAT_FIS-ANT-HORIZONT +12139 61 20 220 255 +CTX_RH_LAT_FIS-ANT-VERTICAL +12140 61 20 60 255 +CTX_RH_LAT_FIS-POST +12141 61 60 100 255 +CTX_RH_MEDIAL_WALL +12142 25 25 25 255 +CTX_RH_POLE_OCCIPITAL +12143 140 20 60 255 +CTX_RH_POLE_TEMPORAL +12144 220 180 20 255 +CTX_RH_S_CALCARINE +12145 63 180 180 255 +CTX_RH_S_CENTRAL +12146 221 20 10 255 +CTX_RH_S_CINGUL-MARGINALIS +12147 221 20 100 255 +CTX_RH_S_CIRCULAR_INSULA_ANT +12148 221 60 140 255 +CTX_RH_S_CIRCULAR_INSULA_INF +12149 221 20 220 255 +CTX_RH_S_CIRCULAR_INSULA_SUP +12150 61 220 220 255 +CTX_RH_S_COLLAT_TRANSV_ANT +12151 100 200 200 255 +CTX_RH_S_COLLAT_TRANSV_POST +12152 10 200 200 255 +CTX_RH_S_FRONT_INF +12153 221 220 20 255 +CTX_RH_S_FRONT_MIDDLE +12154 141 20 100 255 +CTX_RH_S_FRONT_SUP +12155 61 220 100 255 +CTX_RH_S_INTERM_PRIM-JENSEN +12156 141 60 20 255 +CTX_RH_S_INTRAPARIET_AND_P_TRANS +12157 143 20 220 255 +CTX_RH_S_OC_MIDDLE_AND_LUNATUS +12158 101 60 220 255 +CTX_RH_S_OC_SUP_AND_TRANSVERSAL +12159 21 20 140 255 +CTX_RH_S_OCCIPITAL_ANT +12160 61 20 180 255 +CTX_RH_S_OC-TEMP_LAT +12161 221 140 20 255 +CTX_RH_S_OC-TEMP_MED_AND_LINGUAL +12162 141 100 220 255 +CTX_RH_S_ORBITAL_LATERAL +12163 221 100 20 255 
+CTX_RH_S_ORBITAL_MED-OLFACT +12164 181 200 20 255 +CTX_RH_S_ORBITAL-H_SHAPED +12165 101 20 20 255 +CTX_RH_S_PARIETO_OCCIPITAL +12166 101 100 180 255 +CTX_RH_S_PERICALLOSAL +12167 181 220 20 255 +CTX_RH_S_POSTCENTRAL +12168 21 140 200 255 +CTX_RH_S_PRECENTRAL-INF-PART +12169 21 20 240 255 +CTX_RH_S_PRECENTRAL-SUP-PART +12170 21 20 200 255 +CTX_RH_S_SUBORBITAL +12171 21 20 60 255 +CTX_RH_S_SUBPARIETAL +12172 101 60 60 255 +CTX_RH_S_TEMPORAL_INF +12173 21 180 180 255 +CTX_RH_S_TEMPORAL_SUP +12174 223 220 60 255 +CTX_RH_S_TEMPORAL_TRANSVERSE +12175 221 60 60 255 +WM_LH_UNKNOWN +13100 0 0 0 255 +WM_LH_G_AND_S_FRONTOMARGIN +13101 23 220 60 255 +WM_LH_G_AND_S_OCCIPITAL_INF +13102 23 60 180 255 +WM_LH_G_AND_S_PARACENTRAL +13103 63 100 60 255 +WM_LH_G_AND_S_SUBCENTRAL +13104 63 20 220 255 +WM_LH_G_AND_S_TRANSV_FRONTOPOL +13105 13 0 250 255 +WM_LH_G_AND_S_CINGUL-ANT +13106 26 60 0 255 +WM_LH_G_AND_S_CINGUL-MID-ANT +13107 26 60 75 255 +WM_LH_G_AND_S_CINGUL-MID-POST +13108 26 60 150 255 +WM_LH_G_CINGUL-POST-DORSAL +13109 25 60 250 255 +WM_LH_G_CINGUL-POST-VENTRAL +13110 60 25 25 255 +WM_LH_G_CUNEUS +13111 180 20 20 255 +WM_LH_G_FRONT_INF-OPERCULAR +13112 220 20 100 255 +WM_LH_G_FRONT_INF-ORBITAL +13113 140 60 60 255 +WM_LH_G_FRONT_INF-TRIANGUL +13114 180 220 140 255 +WM_LH_G_FRONT_MIDDLE +13115 140 100 180 255 +WM_LH_G_FRONT_SUP +13116 180 20 140 255 +WM_LH_G_INS_LG_AND_S_CENT_INS +13117 23 10 10 255 +WM_LH_G_INSULAR_SHORT +13118 225 140 140 255 +WM_LH_G_OCCIPITAL_MIDDLE +13119 180 60 180 255 +WM_LH_G_OCCIPITAL_SUP +13120 20 220 60 255 +WM_LH_G_OC-TEMP_LAT-FUSIFOR +13121 60 20 140 255 +WM_LH_G_OC-TEMP_MED-LINGUAL +13122 220 180 140 255 +WM_LH_G_OC-TEMP_MED-PARAHIP +13123 65 100 20 255 +WM_LH_G_ORBITAL +13124 220 60 20 255 +WM_LH_G_PARIET_INF-ANGULAR +13125 20 60 220 255 +WM_LH_G_PARIET_INF-SUPRAMAR +13126 100 100 60 255 +WM_LH_G_PARIETAL_SUP +13127 220 180 220 255 +WM_LH_G_POSTCENTRAL +13128 20 180 140 255 +WM_LH_G_PRECENTRAL +13129 60 140 180 255 +WM_LH_G_PRECUNEUS +13130 25 
20 140 255 +WM_LH_G_RECTUS +13131 20 60 100 255 +WM_LH_G_SUBCALLOSAL +13132 60 220 20 255 +WM_LH_G_TEMP_SUP-G_T_TRANSV +13133 60 60 220 255 +WM_LH_G_TEMP_SUP-LATERAL +13134 220 60 220 255 +WM_LH_G_TEMP_SUP-PLAN_POLAR +13135 65 220 60 255 +WM_LH_G_TEMP_SUP-PLAN_TEMPO +13136 25 140 20 255 +WM_LH_G_TEMPORAL_INF +13137 220 220 100 255 +WM_LH_G_TEMPORAL_MIDDLE +13138 180 60 60 255 +WM_LH_LAT_FIS-ANT-HORIZONT +13139 61 20 220 255 +WM_LH_LAT_FIS-ANT-VERTICAL +13140 61 20 60 255 +WM_LH_LAT_FIS-POST +13141 61 60 100 255 +WM_LH_MEDIAL_WALL +13142 25 25 25 255 +WM_LH_POLE_OCCIPITAL +13143 140 20 60 255 +WM_LH_POLE_TEMPORAL +13144 220 180 20 255 +WM_LH_S_CALCARINE +13145 63 180 180 255 +WM_LH_S_CENTRAL +13146 221 20 10 255 +WM_LH_S_CINGUL-MARGINALIS +13147 221 20 100 255 +WM_LH_S_CIRCULAR_INSULA_ANT +13148 221 60 140 255 +WM_LH_S_CIRCULAR_INSULA_INF +13149 221 20 220 255 +WM_LH_S_CIRCULAR_INSULA_SUP +13150 61 220 220 255 +WM_LH_S_COLLAT_TRANSV_ANT +13151 100 200 200 255 +WM_LH_S_COLLAT_TRANSV_POST +13152 10 200 200 255 +WM_LH_S_FRONT_INF +13153 221 220 20 255 +WM_LH_S_FRONT_MIDDLE +13154 141 20 100 255 +WM_LH_S_FRONT_SUP +13155 61 220 100 255 +WM_LH_S_INTERM_PRIM-JENSEN +13156 141 60 20 255 +WM_LH_S_INTRAPARIET_AND_P_TRANS +13157 143 20 220 255 +WM_LH_S_OC_MIDDLE_AND_LUNATUS +13158 101 60 220 255 +WM_LH_S_OC_SUP_AND_TRANSVERSAL +13159 21 20 140 255 +WM_LH_S_OCCIPITAL_ANT +13160 61 20 180 255 +WM_LH_S_OC-TEMP_LAT +13161 221 140 20 255 +WM_LH_S_OC-TEMP_MED_AND_LINGUAL +13162 141 100 220 255 +WM_LH_S_ORBITAL_LATERAL +13163 221 100 20 255 +WM_LH_S_ORBITAL_MED-OLFACT +13164 181 200 20 255 +WM_LH_S_ORBITAL-H_SHAPED +13165 101 20 20 255 +WM_LH_S_PARIETO_OCCIPITAL +13166 101 100 180 255 +WM_LH_S_PERICALLOSAL +13167 181 220 20 255 +WM_LH_S_POSTCENTRAL +13168 21 140 200 255 +WM_LH_S_PRECENTRAL-INF-PART +13169 21 20 240 255 +WM_LH_S_PRECENTRAL-SUP-PART +13170 21 20 200 255 +WM_LH_S_SUBORBITAL +13171 21 20 60 255 +WM_LH_S_SUBPARIETAL +13172 101 60 60 255 +WM_LH_S_TEMPORAL_INF +13173 21 
180 180 255 +WM_LH_S_TEMPORAL_SUP +13174 223 220 60 255 +WM_LH_S_TEMPORAL_TRANSVERSE +13175 221 60 60 255 +WM_RH_UNKNOWN +14100 0 0 0 255 +WM_RH_G_AND_S_FRONTOMARGIN +14101 23 220 60 255 +WM_RH_G_AND_S_OCCIPITAL_INF +14102 23 60 180 255 +WM_RH_G_AND_S_PARACENTRAL +14103 63 100 60 255 +WM_RH_G_AND_S_SUBCENTRAL +14104 63 20 220 255 +WM_RH_G_AND_S_TRANSV_FRONTOPOL +14105 13 0 250 255 +WM_RH_G_AND_S_CINGUL-ANT +14106 26 60 0 255 +WM_RH_G_AND_S_CINGUL-MID-ANT +14107 26 60 75 255 +WM_RH_G_AND_S_CINGUL-MID-POST +14108 26 60 150 255 +WM_RH_G_CINGUL-POST-DORSAL +14109 25 60 250 255 +WM_RH_G_CINGUL-POST-VENTRAL +14110 60 25 25 255 +WM_RH_G_CUNEUS +14111 180 20 20 255 +WM_RH_G_FRONT_INF-OPERCULAR +14112 220 20 100 255 +WM_RH_G_FRONT_INF-ORBITAL +14113 140 60 60 255 +WM_RH_G_FRONT_INF-TRIANGUL +14114 180 220 140 255 +WM_RH_G_FRONT_MIDDLE +14115 140 100 180 255 +WM_RH_G_FRONT_SUP +14116 180 20 140 255 +WM_RH_G_INS_LG_AND_S_CENT_INS +14117 23 10 10 255 +WM_RH_G_INSULAR_SHORT +14118 225 140 140 255 +WM_RH_G_OCCIPITAL_MIDDLE +14119 180 60 180 255 +WM_RH_G_OCCIPITAL_SUP +14120 20 220 60 255 +WM_RH_G_OC-TEMP_LAT-FUSIFOR +14121 60 20 140 255 +WM_RH_G_OC-TEMP_MED-LINGUAL +14122 220 180 140 255 +WM_RH_G_OC-TEMP_MED-PARAHIP +14123 65 100 20 255 +WM_RH_G_ORBITAL +14124 220 60 20 255 +WM_RH_G_PARIET_INF-ANGULAR +14125 20 60 220 255 +WM_RH_G_PARIET_INF-SUPRAMAR +14126 100 100 60 255 +WM_RH_G_PARIETAL_SUP +14127 220 180 220 255 +WM_RH_G_POSTCENTRAL +14128 20 180 140 255 +WM_RH_G_PRECENTRAL +14129 60 140 180 255 +WM_RH_G_PRECUNEUS +14130 25 20 140 255 +WM_RH_G_RECTUS +14131 20 60 100 255 +WM_RH_G_SUBCALLOSAL +14132 60 220 20 255 +WM_RH_G_TEMP_SUP-G_T_TRANSV +14133 60 60 220 255 +WM_RH_G_TEMP_SUP-LATERAL +14134 220 60 220 255 +WM_RH_G_TEMP_SUP-PLAN_POLAR +14135 65 220 60 255 +WM_RH_G_TEMP_SUP-PLAN_TEMPO +14136 25 140 20 255 +WM_RH_G_TEMPORAL_INF +14137 220 220 100 255 +WM_RH_G_TEMPORAL_MIDDLE +14138 180 60 60 255 +WM_RH_LAT_FIS-ANT-HORIZONT +14139 61 20 220 255 +WM_RH_LAT_FIS-ANT-VERTICAL 
+14140 61 20 60 255 +WM_RH_LAT_FIS-POST +14141 61 60 100 255 +WM_RH_MEDIAL_WALL +14142 25 25 25 255 +WM_RH_POLE_OCCIPITAL +14143 140 20 60 255 +WM_RH_POLE_TEMPORAL +14144 220 180 20 255 +WM_RH_S_CALCARINE +14145 63 180 180 255 +WM_RH_S_CENTRAL +14146 221 20 10 255 +WM_RH_S_CINGUL-MARGINALIS +14147 221 20 100 255 +WM_RH_S_CIRCULAR_INSULA_ANT +14148 221 60 140 255 +WM_RH_S_CIRCULAR_INSULA_INF +14149 221 20 220 255 +WM_RH_S_CIRCULAR_INSULA_SUP +14150 61 220 220 255 +WM_RH_S_COLLAT_TRANSV_ANT +14151 100 200 200 255 +WM_RH_S_COLLAT_TRANSV_POST +14152 10 200 200 255 +WM_RH_S_FRONT_INF +14153 221 220 20 255 +WM_RH_S_FRONT_MIDDLE +14154 141 20 100 255 +WM_RH_S_FRONT_SUP +14155 61 220 100 255 +WM_RH_S_INTERM_PRIM-JENSEN +14156 141 60 20 255 +WM_RH_S_INTRAPARIET_AND_P_TRANS +14157 143 20 220 255 +WM_RH_S_OC_MIDDLE_AND_LUNATUS +14158 101 60 220 255 +WM_RH_S_OC_SUP_AND_TRANSVERSAL +14159 21 20 140 255 +WM_RH_S_OCCIPITAL_ANT +14160 61 20 180 255 +WM_RH_S_OC-TEMP_LAT +14161 221 140 20 255 +WM_RH_S_OC-TEMP_MED_AND_LINGUAL +14162 141 100 220 255 +WM_RH_S_ORBITAL_LATERAL +14163 221 100 20 255 +WM_RH_S_ORBITAL_MED-OLFACT +14164 181 200 20 255 +WM_RH_S_ORBITAL-H_SHAPED +14165 101 20 20 255 +WM_RH_S_PARIETO_OCCIPITAL +14166 101 100 180 255 +WM_RH_S_PERICALLOSAL +14167 181 220 20 255 +WM_RH_S_POSTCENTRAL +14168 21 140 200 255 +WM_RH_S_PRECENTRAL-INF-PART +14169 21 20 240 255 +WM_RH_S_PRECENTRAL-SUP-PART +14170 21 20 200 255 +WM_RH_S_SUBORBITAL +14171 21 20 60 255 +WM_RH_S_SUBPARIETAL +14172 101 60 60 255 +WM_RH_S_TEMPORAL_INF +14173 21 180 180 255 +WM_RH_S_TEMPORAL_SUP +14174 223 220 60 255 +WM_RH_S_TEMPORAL_TRANSVERSE +14175 221 60 60 255 diff --git a/pantheon/data/hcp_config/FreeSurferSubcorticalLabelTableLut.txt b/pantheon/data/hcp_config/FreeSurferSubcorticalLabelTableLut.txt new file mode 100644 index 0000000..8506938 --- /dev/null +++ b/pantheon/data/hcp_config/FreeSurferSubcorticalLabelTableLut.txt @@ -0,0 +1,39 @@ +ACCUMBENS_LEFT +26 255 165 0 255 +ACCUMBENS_RIGHT +58 255 165 0 
255 +AMYGDALA_LEFT +18 103 255 255 255 +AMYGDALA_RIGHT +54 103 255 255 255 +BRAIN_STEM +16 119 159 176 255 +CAUDATE_LEFT +11 122 186 220 255 +CAUDATE_RIGHT +50 122 186 220 255 +CEREBELLUM_LEFT +8 230 148 34 255 +CEREBELLUM_RIGHT +47 230 148 34 255 +DIENCEPHALON_VENTRAL_LEFT +28 165 42 42 255 +DIENCEPHALON_VENTRAL_RIGHT +60 165 42 42 255 +HIPPOCAMPUS_LEFT +17 220 216 20 255 +HIPPOCAMPUS_RIGHT +53 220 216 20 255 +PALLIDUM_LEFT +13 12 48 255 255 +PALLIDUM_RIGHT +52 13 48 255 255 +PUTAMEN_LEFT +12 236 13 176 255 +PUTAMEN_RIGHT +51 236 13 176 255 +THALAMUS_LEFT +10 0 118 14 255 +THALAMUS_RIGHT +49 0 118 14 255 + diff --git a/pantheon/data/hcp_config/MSMSulcStrainFinalconf b/pantheon/data/hcp_config/MSMSulcStrainFinalconf new file mode 100644 index 0000000..f1b2f5b --- /dev/null +++ b/pantheon/data/hcp_config/MSMSulcStrainFinalconf @@ -0,0 +1,18 @@ +--simval=3,2,2,2 +--sigma_in=0,0,0,0 +--sigma_ref=0,0,0,0 +--lambda=0,10,7.5,7.5 +--it=50,10,15,15 +--opt=AFFINE,DISCRETE,DISCRETE,DISCRETE +--CPgrid=6,2,3,4 +--SGgrid=6,4,5,6 +--datagrid=6,4,5,6 +--regoption=3 +--regexp=2 +--dopt=HOCR +--VN +--rescaleL +--triclique +--k_exponent=2 +--bulkmod=1.6 +--shearmod=0.4 diff --git a/pantheon/data/roi_definition_example.yaml b/pantheon/data/roi_definition_example.yaml new file mode 100644 index 0000000..3f75b21 --- /dev/null +++ b/pantheon/data/roi_definition_example.yaml @@ -0,0 +1,24 @@ +--- +AG: + { + AtlasID: MMP1, + AtlasSpace: fsLR, + ROIType: Surface, + IndexL: [323, 330, 331], + IndexR: [143, 150, 151], + LabelL: [L_PGp, L_PGi, L_PGs], + LabelR: [R_PGp, R_PGi, R_PGs], + Note: "", + } + +HPC: + { + AtlasID: Template_ASeg, + AtlasSpace: MNI152NLin6Asym, + ROIType: Volume, + IndexL: [17], + IndexR: [53], + LabelL: [HIPPOCAMPUS_LEFT], + LabelR: [HIPPOCAMPUS_RIGHT], + Note: "", + } \ No newline at end of file diff --git a/pantheon/data/template_example.yaml b/pantheon/data/template_example.yaml new file mode 100644 index 0000000..2976abc --- /dev/null +++ 
b/pantheon/data/template_example.yaml @@ -0,0 +1,5 @@ +--- +MNI152NLin6Asym: + data/external/template/tpl-MNI152NLin6Asym +fsLR_32k: + data/external/template/tpl-fsLR_32k_S1200 diff --git a/pantheon/glm/__init__.py b/pantheon/glm/__init__.py new file mode 100644 index 0000000..a5682fb --- /dev/null +++ b/pantheon/glm/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- \ No newline at end of file diff --git a/pantheon/glm/confound.py b/pantheon/glm/confound.py new file mode 100644 index 0000000..7327458 --- /dev/null +++ b/pantheon/glm/confound.py @@ -0,0 +1,308 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Functions relate to GLM confound regressor.""" + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: +# + +from __future__ import annotations +from typing import Optional, Union +import os +from pathlib import Path +import numpy as np +import pandas as pd + +from ..utils.shell import run_cmd +from ..utils.typing import PathLike + + +def make_confound_regressor( + df: pd.DataFrame, + out_dir: PathLike, + demean: bool = True, + split_into_pad_runs: bool = True, + confound_list: list[str] = [ + "trans_x", + "trans_y", + "trans_z", + "rot_x", + "rot_y", + "rot_z", + "framewise_displacement", + ], + prefix: Optional[str] = None, +) -> list[Path]: + """Makes confound regressors file. + + This function makes 1D txt file that could be read by AFNI's + programs. It also has the ability to make run-specific padded files. + This is very useful when building a design matrix contains all runs. + + Args: + df: Confound dataframe. + out_dir: Directory to store output file. + demean: If true, remove mean value from each column. + split_into_pad_runs: If true, make run-specific confound files + with same length as input df. Values in rows doesn't belong + to the current run are filled with 0. + confound_list: Confound names include in the output file. Every + specified confound should present in the df. 
+ prefix: Filename prefix of the output file. If it's None, the + default filename is confound.1D (or ${run_id}_confound.1D). + + Returns: + A confound regressor file which could be used in AFNI's + 3dDeconvolve program. + If 'split_into_pad_runs' is true, returning a list of filenames + corresponds to each run in the df. + + Raises: + ValueError: Less than 2 runs in df if 'split_into_pad_runs' is + true. + """ + + print(f"Confound regressor: {', '.join(confound_list)}.") + if prefix: + prefix = prefix if prefix.endswith("_") else f"{prefix}_" + else: + prefix = "" + # Get run list if split_into_pad_runs + if split_into_pad_runs: + run_list = df["run_id"].unique().tolist() + if len(run_list) < 2: + raise ValueError("There should be at least 2 runs if 'split_into_pad_runs' is true.") + # Mean-center confound regressors + if demean: + if split_into_pad_runs: + confound = ( + df.loc[:, ["run_id"] + confound_list] + .groupby(by=["run_id"], sort=False) + .transform(lambda x: x - x.mean()) + ) + confound = confound.fillna(0) + print("Mean center all regressors within each run.") + else: + confound = (df.loc[:, confound_list] - df.loc[:, confound_list].mean()).fillna(0) + print("Mean center all regressors.") + # Or not + else: + confound = df.loc[:, confound_list].fillna(0) + # Convert confound regressors for per run regression + if split_into_pad_runs: + confound_split = dict() + for run_id in run_list: + confound_split[run_id] = np.zeros((df.shape[0], len(confound_list))) + confound_split[run_id][df.run_id == run_id, :] = confound.loc[ + df.run_id == run_id, : + ].to_numpy() + # Write confound regressors to file + fname = out_dir.joinpath(f"{prefix}confound.1D") + confound_file = [fname] + np.savetxt(fname, confound, fmt="%.6f") + if split_into_pad_runs: + confound_file = [] + for run_id in run_list: + fname = out_dir.joinpath(f"{prefix}{run_id}_confound.1D") + confound_file.append(fname) + np.savetxt(fname, confound_split[run_id], fmt="%.6f") + + return confound_file + 
def make_good_tr_regressor(
    df: pd.DataFrame,
    out_dir: os.PathLike,
    censor_prev_tr: bool = True,
    fd_thresh: Optional[float] = 0.5,
    enorm_thresh: Optional[float] = 0.2,
    extra_censor: Optional[pd.DataFrame] = None,
    prefix: Optional[str] = None,
    dry_run: bool = False,
) -> tuple[Path, Path]:
    """Calculates good TR based on motion parameters.

    Args:
        df: Confound dataframe. Requires a 'run_id' column. A
            'framewise_displacement' column is required when fd_thresh
            is enabled, and the six rigid motion parameter columns
            (trans_x/y/z, rot_x/y/z) when enorm_thresh is enabled.
        out_dir: Directory to store output file.
        censor_prev_tr: If true, also mark the time point before a
            bad TR as bad.
        fd_thresh: Framewise displacement threshold. TRs exceed this
            are marked as bad. None (or 0) disables this criterion.
        enorm_thresh: Eucilidean norm threshold. TRs exceed this are
            marked as bad. None (or 0) disables this criterion.
        extra_censor: Extra censor information. It should be a dataframe
            with a column named 'is_good'. The values in the column
            could be 1 or 0, which 1 represents good TR and 0 represents
            bad TR. This information will be combined with the fd and
            enorm based method to determine the final good TR list.
        prefix: Filename prefix of the output file. If it's None, the
            default filenames are goodtr.1D and censor_info.csv.
        dry_run: If true, only print out censored TR information,
            instead of writing output files.

    Returns:
        A tuple (GoodTR, MotionCensor), where GoodTR is the filename of
        the good TR file, and MotionCensor is the filename of the
        detailed motion metric file.
    """

    prefix = (prefix if prefix.endswith("_") else f"{prefix}_") if prefix else ""
    out_dir = Path(out_dir)
    assert "run_id" in df.columns, "Column 'run_id' not found in the input dataframe."
    # Length (number of TRs) of each run, in the order runs appear in df
    run_list = df["run_id"].unique().tolist()
    run_lengths = [df.loc[df.run_id == run_id, :].shape[0] for run_id in run_list]
    # Start from all-good; each criterion below zeros out its bad TRs
    good_tr = np.ones(df.shape[0], dtype=np.int16)
    motion_censor = df[[]].copy()
    # Censor TR based on Framewise displacement (L1 norm)
    if fd_thresh:
        print(f"Framewise Displacement threshold: {fd_thresh}")
        assert (
            "framewise_displacement" in df.columns
        ), "Column 'framewise_displacement' not found ..."
        fd = df["framewise_displacement"].to_numpy()
        fd = np.nan_to_num(fd, nan=0)
        # The first volume of each run has no preceding TR; force its FD to 0
        for i in range(1, len(run_lengths)):
            fd[np.sum(run_lengths[:i])] = 0
        motion_censor["fd"] = fd
        motion_censor["fd_censor"] = np.where(motion_censor["fd"] > fd_thresh, 0, 1)
        good_tr = good_tr * motion_censor["fd_censor"].to_numpy()
    # Censor TR based on Euclidean Norm (L2 norm)
    if enorm_thresh:
        print(f"Euclidean Norm threshold: {enorm_thresh}")
        enorm = calc_motion_enorm(df)
        # The first volume of each run has no preceding TR; force its enorm to 0
        for i in range(1, len(run_lengths)):
            enorm[np.sum(run_lengths[:i])] = 0
        motion_censor["enorm"] = enorm
        motion_censor["enorm_censor"] = np.where(motion_censor["enorm"] > enorm_thresh, 0, 1)
        good_tr = good_tr * motion_censor["enorm_censor"].to_numpy()
    # Extra censor from external source
    # NOTE: explicit None check; bool(DataFrame) raises ValueError in pandas
    if extra_censor is not None:
        if "is_good" in extra_censor.columns:
            print("Calculate bad TR based on extra censor data ...")
            # Convert to ndarray to avoid pandas index-alignment surprises
            good_tr = good_tr * extra_censor["is_good"].to_numpy()
        else:
            print("Column 'is_good' not found in dataframe extra_censor.")
    # Also censor previous TR when a TR is marked as bad
    if censor_prev_tr:
        good_tr[:-1] = good_tr[:-1] * good_tr[1:]
    # Write good tr and motion censor info to file
    good_tr_file = out_dir.joinpath(f"{prefix}goodtr.1D")
    motion_censor_file = out_dir.joinpath(f"{prefix}censor_info.csv")
    if not dry_run:
        good_tr = good_tr.T.astype(np.int16)
        np.savetxt(good_tr_file, good_tr, fmt="%i")
        motion_censor.to_csv(motion_censor_file, index=False)

    n_censor = np.sum(good_tr == 0)
    pct_censor = np.mean(good_tr == 0) * 100
    print(f"Total censored TR number: {n_censor}({pct_censor:.2f}%)")

    return good_tr_file, motion_censor_file
+ """ + + mot_par = ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"] + enorm = np.sqrt( + np.sum((df[mot_par].to_numpy()[1:, :] - df[mot_par].to_numpy()[:-1, :]) ** 2, axis=1) + ) + enorm = np.insert(enorm, 0, 0) + return enorm + + +def remove_allzero_column(confound_file: PathLike) -> tuple[Path, list]: + """Removes all zero column in confound regressor file. + + This functions overwrites the input confound regressor file. It is + useful when the head motions are very small in some direction. In + such case, the regressors will contain only zeros under a give float + precision, which could be problematic for GLM programs. + + Args: + confound_file: Confound regressor file. + + Returns: + A tuple (ConfoundFile, Index), where ConfoundFile is the + filename of the input confound regressor file, and the Index is + the index of columns only have zeros. + """ + + confound = np.loadtxt(confound_file) + ncol = confound.shape[1] + # Check each column + sel = [True] * ncol + for i in range(confound.shape[1]): + if np.allclose(confound[:, i], 0): + sel[i] = False + # Remove column in place + if np.sum(sel) != ncol: + confound = confound[:, sel] + print(f"WARNING: Removing {ncol-np.sum(sel)} all zero column!") + np.savetxt(confound_file, confound, fmt="%.6f") + # get bad column index + allzero_column_index = list(np.arange(ncol)[np.invert(sel)]) + else: + allzero_column_index = [] + return confound_file, allzero_column_index diff --git a/pantheon/glm/data.py b/pantheon/glm/data.py new file mode 100644 index 0000000..2a8a3d5 --- /dev/null +++ b/pantheon/glm/data.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Functions relate to data used in GLM.""" + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: + +from __future__ import annotations +from typing import Optional, Union +from pathlib import Path +import tempfile + +from ..utils.shell import run_cmd +from ..utils.typing import PathLike + + +def scale_func_image( + 
in_file: PathLike, + out_file: PathLike, + mask_file: Optional[PathLike] = None, + cifti: bool = False, +) -> Path: + """Scales a image in range of 0 to 200. + + By scaling BOLD data in range [0, 200] with mean equals to 100, the + beta derived from GLM roughly represents the percentage of signal + change (all regressors are scaled to unit size). See AFNI documents + for details. + + Args: + in_file: A functional image file. + out_file: Output functional file. + mask_file: A mask file applies to input image during scaling. It + only enables when input file is a NIFTI or GIFTI file. + cifti: If true, treat input file as a CIFTI file and scale it + with wb_command. + + Returns: + A functional image file. + """ + + with tempfile.TemporaryDirectory() as tmp_dir: + + if cifti: + # Calculate temporal mean image + mean_file = Path(tmp_dir, "mean.dscalar.nii") + run_cmd(f"wb_command -disable-provenance -cifti-reduce {in_file} MEAN {mean_file}") + # Scale func image in to 0-200 range + scale_expr = f"min(200,(a/b)*100)*(a>0)*(b>0)" + run_cmd( + f"wb_command -disable-provenance -cifti-math '{scale_expr}' " + f"{out_file} -var a {in_file} -var b {mean_file} -select 1 1 -repeat", + print_output=False, + ) + else: + # Calculate temporal mean image + if Path(in_file).suffix.endswith("gii"): + mean_file = Path(tmp_dir, "mean.shape.gii") + else: + mean_file = Path(tmp_dir, "mean.nii.gz") + run_cmd(f"3dTstat -mean -prefix {mean_file} {in_file}") + # Scale func image in to 0-200 range + if mask_file: + scale_expr = f"c*min(200,(a/b)*100)*step(a)*step(b)" + run_cmd( + f"3dcalc -a {in_file} -b {mean_file} -c {mask_file} " + f"-expr '{scale_expr}' -prefix {out_file}" + ) + else: + scale_expr = f"min(200,(a/b)*100)*step(a)*step(b)" + run_cmd( + f"3dcalc -a {in_file} -b {mean_file} " + f"-expr '{scale_expr}' -prefix {out_file}" + ) + + return Path(out_file) + + +def calc_run_length(in_file: Union[PathLike, list[PathLike]], cifti: bool = False) -> list[int]: + """ + Calculates functional 
def fit_3dREMLfit_cifti(
    func_file: PathLike,
    design_matrix_file: PathLike,
    out_dir: PathLike,
    left_roi_file: Optional[PathLike] = None,
    right_roi_file: Optional[PathLike] = None,
    volume_label_file: Optional[PathLike] = None,
    prefix: Optional[str] = None,
    extra_3dremlfit_args: str = "-tout -rout -noFDR -nobout -quiet",
    debug: bool = False,
) -> Path:
    """Fits GLM with AFNI's 3dREMLfit on CIFTI file.

    The input CIFTI file is split into GIFTI surface parts and a NIFTI
    volume part, the parts are fitted by fit_3dREMLfit_cifti_separate,
    and the results are merged back into a single dscalar file.

    Args:
        func_file: A CIFTI dtseries/dscalar file.
        design_matrix_file: Design matrix file in AFNI format.
        out_dir: Directory to store output files.
        left_roi_file: Left surface mask file. Optional.
        right_roi_file: Right surface mask file. Optional.
        volume_label_file: Volume structure label file. This file is
            required if the input CIFTI file has volume part.
        prefix: The output filename prefix (before .dscalar.nii).
            If None, use default names.
        extra_3dremlfit_args: Extra arguments pass to AFNI's 3dREMLFit
            program.
        debug: If true, save intermediate files to the debug/{prefix}
            folder inside the out_dir.

    Returns:
        A CIFTI dscalar file contains all outputs from 3dREMLFit.

    Raises:
        ValueError: The volume_label_file is None when there's volume
            part in the input CIFTI file.
    """

    with tempfile.TemporaryDirectory(dir=out_dir) as tmp_dir:

        # Parse prefix
        func_prefix = "func" if prefix is None else prefix
        model_prefix = "fitted" if prefix is None else prefix

        # Check which part presents in the CIFTI file and create
        # filenames for splitted parts
        left_surf_file, right_surf_file, volume_file, volume_mask_file = None, None, None, None
        # BrainModel axis is axis 1 for dscalar/dtseries files
        axis_bm = nib.load(func_file).header.get_axis(1)
        if "CIFTI_STRUCTURE_CORTEX_LEFT" in axis_bm.name:
            left_surf_file = Path(tmp_dir, f"{func_prefix}_hemi-L.func.gii")
        if "CIFTI_STRUCTURE_CORTEX_RIGHT" in axis_bm.name:
            right_surf_file = Path(tmp_dir, f"{func_prefix}_hemi-R.func.gii")
        if axis_bm.volume_shape is not None:
            if volume_label_file is None:
                raise ValueError(
                    "There is volume part in the input CIFTI file. "
                    "A volume_label_file is required"
                )
            volume_file = Path(tmp_dir, f"{func_prefix}_volume.nii.gz")
            volume_mask_file = Path(tmp_dir, f"{func_prefix}_volume_mask.nii.gz")
        # Split cifti file to left/right surfaces and volume image
        # index 0: left surface; index 1: right surface; index 2: volume
        # index 3: volume mask
        _ = split_dtseries(
            func_file,
            left_surf_out_file=left_surf_file,
            right_surf_out_file=right_surf_file,
            volume_out_file=volume_file,
            volume_mask_out_file=volume_mask_file,
        )

        # Fit GLM using AFNI's 3dREMLfit
        out_file = fit_3dREMLfit_cifti_separate(
            design_matrix_file,
            out_dir,
            left_surf_file=left_surf_file,
            right_surf_file=right_surf_file,
            volume_file=volume_file,
            volume_mask_file=volume_mask_file,
            left_roi_file=left_roi_file,
            right_roi_file=right_roi_file,
            volume_label_file=volume_label_file,
            prefix=model_prefix,
            extra_3dremlfit_args=extra_3dremlfit_args,
            debug=debug,
        )
        if debug:
            # Preserve intermediate split files before tmp_dir is removed
            debug_dir = Path(out_dir).joinpath("debug", model_prefix)
            shutil.copytree(tmp_dir, debug_dir, dirs_exist_ok=True)

    return out_file
+ out_dir: Directory to store output files. + left_surf_file: Left surface GIFTI file. Optional. + right_surf_file: Right surface GIFTI file. Optional. + volume_file: Volume NIFTI file. Optional. + volume_mask_file: Volume mask file of volume_file. Optional. + left_roi_file: Left surface mask file. Optional. + right_roi_file: Right surface mask file. Optional. + volume_label_file: Volume structure label file. This file is + required if the input CIFTI file has volume part. + prefix: The output filename prefix (before .dscalar.nii). + If None, use default names. + extra_3dremlfit_args: Extra arguments pass to AFNI's 3dREMLFit + program. + debug: If true, save intermediate files to fitted_bucket folder + inside the out_dir. + + Returns: + A CIFTI file dscalar contains all outputs from 3dREMLFit. + + Raises: + ValueError: None of the input file is specified. + ValueError: Input file's format is incorrect. + ValueError: The volume_label_file is None when volume_file is + specified. + """ + + if (left_surf_file is None) and (right_surf_file is None) and (volume_file is None): + raise ValueError("At least one input file is required.") + if (left_surf_file is not None) and (not Path(left_surf_file).name.endswith(".gii")): + raise ValueError("Argument left_surf_file should be a GIFTI file.") + if (right_surf_file is not None) and (not Path(right_surf_file).name.endswith(".gii")): + raise ValueError("Argument right_surf_file should be a GIFTI file.") + if (volume_file is not None) and (not Path(volume_file).name.endswith(("nii.gz", ".nii"))): + raise ValueError("Argument volume_file should be a NIFTI file.") + if (volume_file is not None) and (volume_label_file is None): + raise ValueError("When volume_file is specified, the volume_label_file is required.") + + # Parse prefix + prefix = "fitted" if prefix is None else prefix + + with tempfile.TemporaryDirectory(dir=out_dir) as tmp_dir: + + # Fit GLM using AFNI's 3dREMLfit + bucket_file = [] + # surface + for i, j, hemi in 
def make_run_start_string(run_length: list[int]) -> str:
    """Generates run start index string.

    This function makes a string contains the TR index of the start of
    each run. It is used for the '-concat' option in AFNI's 3dDeconvolve
    program.

    Args:
        run_length: A list of number indicates the length (number of TR)
            of each run.

    Returns:
        A string contains run start index (e.g., '1D: 0 100 300') which
        could be used in AFNI's 3dDeconvolve program.
    """

    # Use int64 instead of int16: a session with more than 32767 TRs in
    # total would silently wrap around with the narrower dtype
    run_start = np.cumsum(np.insert(run_length, 0, 0), dtype=np.int64)[:-1]
    return "1D: " + " ".join(str(i) for i in run_start)
def decompose_dtseries(
    img: nib.cifti2.Cifti2Image, dtype: Any = np.float32
) -> dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]:
    """
    Splits a CIFTI dtseries image into surface and volume parts.

    Note: By default, CIFTI file represents samples in columns. A
    dtseries file shares the brain-model layout of a dscalar file, so
    this function simply delegates to decompose_dscalar.

    Args:
        img: A nib.cifti2.Cifti2Image object.
        dtype: Data type of the splitted data.

    Returns:
        A dict contains splitted CIFTI data. The keys are SurfaceL,
        SurfaceR, and Volume.
    """
    return decompose_dscalar(img, dtype=dtype)
+ """ + return decompose_dscalar(img, dtype=dtype) + + +def read_dscalar( + in_file: Union[PathLike, list[PathLike]], + volume_as_img: bool = False, + standardize: Optional[Literal["zscore"]] = None, + dtype: Any = np.float32, +) -> dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]: + """Reads CIFTI dscalar files into surface and volume data array. + + Multiple input files will be concatenated along the 1st dim (row). + + Args: + in_file: A single or a list of CIFTI dscalar files. + volume_as_img: If true, the volume part in the CIFTI image is + extracted as a nib.nifti1.Nifti1Image object. If false, it's + extracted as a numpy array. + standardize: Standardize each vertex/voxel along the time + dimension. Valid: zscore. + dtype: Data type of the returned data. + + Returns: + A dict contains splitted CIFTI data. The keys are SurfaceL, + SurfaceR, and Volume. If in_file is a list of filenames, the + output is a list of dicts. + + Raises: + TypeError: Input file is not a CIFTI file. + """ + + file_list = [in_file] if not isinstance(in_file, list) else in_file + # Loop for each in_file + data = {"SurfaceL": [], "SurfaceR": [], "Volume": []} + for f in file_list: + # Read CIFTI file + img = nib.load(f) + if not isinstance(img, nib.cifti2.Cifti2Image): + raise TypeError(f"File {f} is not a CIFTI file.") + # Decompose CIFTI file + data_cifti = decompose_dscalar(img, dtype=dtype) + # Transpose surface data to make row represents samples and col represents features + # by default, samples are in columns + if data_cifti["SurfaceL"] is not None: + data_cifti["SurfaceL"] = data_cifti["SurfaceL"].T + if data_cifti["SurfaceR"] is not None: + data_cifti["SurfaceR"] = data_cifti["SurfaceR"].T + # Standardize if requested + if standardize == "zscore": + if data_cifti["SurfaceL"] is not None: + data_cifti["SurfaceL"] = zscore(data_cifti["SurfaceL"], axis=0) + if data_cifti["SurfaceR"] is not None: + data_cifti["SurfaceR"] = zscore(data_cifti["SurfaceR"], axis=0) + if 
data_cifti["Volume"] is not None: + data_cifti["Volume"] = nib.Nifti1Image( + zscore(data_cifti["Volume"].get_fdata(dtype=dtype), axis=3), + data_cifti["Volume"].affine, + ) + data["SurfaceL"].append(data_cifti["SurfaceL"]) + data["SurfaceR"].append(data_cifti["SurfaceR"]) + data["Volume"].append(data_cifti["Volume"]) + # Concatenate multiple input data + if all(i is not None for i in data["SurfaceL"]): + data["SurfaceL"] = np.vstack(data["SurfaceL"]) + else: + data["SurfaceL"] = None + if all(i is not None for i in data["SurfaceR"]): + data["SurfaceR"] = np.vstack(data["SurfaceR"]) + else: + data["SurfaceR"] = None + if all(i is not None for i in data["Volume"]): + data["Volume"] = nli.concat_imgs(data["Volume"], dtype=dtype) + else: + data["Volume"] = None + # Extract volume data if requested + if (not volume_as_img) and (data["Volume"] is not None): + # Make a mask volume image contains voxels defined in CIFTI file + # (see func: get_vol_img_from_cifti) + axis_bm = img.header.get_axis(1) + vox_indices = tuple(axis_bm.voxel[axis_bm.volume_mask].T) + mask_vol = np.zeros(axis_bm.volume_shape, dtype=np.int16) + mask_vol[vox_indices] = 1 + mask_img = nib.Nifti1Image(mask_vol, axis_bm.affine) + # Extract volume data into a 2d array using nilearn's apply_mask function + if mask_vol.sum() > 0: + data["Volume"] = nlm.apply_mask(data["Volume"], mask_img) + return data + + +def read_dtseries( + in_file: Union[PathLike, list[PathLike]], + volume_as_img: bool = False, + standardize: Optional[Literal["zscore"]] = None, + dtype: Any = np.float32, +) -> dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]: + """Reads CIFTI dtseries files into surface and volume data array. + + Multiple input files will be concatenated along the 1st dim (row). + + Args: + in_file: A single or a list of CIFTI dtseries files. + volume_as_img: If true, the volume part in the CIFTI image is + extracted as a nib.nifti1.Nifti1Image object. If false, it's + extracted as a numpy array. 
def read_dscalar_roi(
    in_file: Union[PathLike, list[PathLike]],
    roi_mask: Union[
        dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]],
        list[dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]],
    ],
    standardize: Optional[Literal["zscore"]] = None,
    single_data_array: bool = True,
    dtype: Any = np.float32,
) -> Union[
    np.ndarray,
    dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]],
    list[np.ndarray],
    list[dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]],
]:
    """Reads CIFTI dscalar data within ROI.

    This function could read multiple ROI data at once. It's faster than
    a explicit for loop, since this method only reads the whole data
    once. In that case, the ROI data will be in a list instead of a
    single numpy array of dict.

    Args:
        in_file: A single or a list of CIFTI dscalar files.
        roi_mask: A (list of) ROI mask dict. It is usually generated by
            the 'make_roi_from_spec' function.
        standardize: Standardize each vertex/voxel along the time
            dimension. Valid: zscore.
        single_data_array: If true, concatenating all parts into a
            single numpy array along columns. Order: SurfaceL, SurfaceR,
            Volume.
        dtype: Data type of the returned data.

    Returns:
        Depending on the inputs, the returned ROI data could be in
        several format.
        If the 'single_data_array' option is True (default), the ROI
        data will be contained in a numpy array. If it's False, the ROI
        data will be in a dict like the roi_mask.
        If the 'roi_mask' is a list of ROI mask dict, the data of each
        ROI will be in a list, and the order is the same as the
        'roi_mask'.
        Multiple input file will always be concatenated along the first
        (row) dimension.
    """

    # Read whole-brain data once (a dict with concatenated data)
    ds = read_dscalar(in_file, volume_as_img=True, standardize=standardize, dtype=dtype)
    # Apply ROI masking
    roi_mask = [roi_mask] if not isinstance(roi_mask, list) else roi_mask
    data = []
    for mask in roi_mask:
        data_roi = {"SurfaceL": None, "SurfaceR": None, "Volume": None}
        # BUGFIX: the deprecated alias np.bool was removed in NumPy 1.24;
        # use the builtin bool for the boolean-index cast
        if mask["SurfaceL"] is not None:
            data_roi["SurfaceL"] = ds["SurfaceL"][:, mask["SurfaceL"].astype(bool)].copy()
        if mask["SurfaceR"] is not None:
            data_roi["SurfaceR"] = ds["SurfaceR"][:, mask["SurfaceR"].astype(bool)].copy()
        if mask["Volume"] is not None:
            data_roi["Volume"] = nlm.apply_mask(ds["Volume"], mask["Volume"])
        # Combine SurfaceL, SurfaceR and Volume if requested
        if single_data_array:
            data_roi_single = []
            for part in ["SurfaceL", "SurfaceR", "Volume"]:
                if data_roi[part] is not None:
                    data_roi_single.append(data_roi[part])
            data_roi = np.hstack(data_roi_single)
        data.append(data_roi)
    # Extract data from list if there's only one ROI
    if len(data) == 1:
        data = data[0]
    return data
+ + Args: + in_file: A single or a list of CIFTI dtseries files. + roi_mask: A (list of) ROI mask dict. It is usually generated by + the 'make_roi_from_spec' function. + standardize: Standardize each vertex/voxel along the time + dimension. Valid: zscore. + single_data_array: If true, concatenating all parts into a + single numpy array along columns. Order: SurfaceL, SurfaceR, + Volume. + dtype: Data type of the returned data. + + Returns: + Depending on the inputs, the returned ROI data could be in + several format. + If the 'single_data_array' option is True (default), the ROI + data will be contained in a numpy array. If it's False, the ROI + data will be in a dict like the roi_mask. + If the 'roi_mask' is a list of ROI mask dict, the data of each + ROI will be in a list, and the order is the same as the + 'roi_mask'. + Multiple input file will always be concatenated along the first + (row) dimension. + """ + + return read_dscalar_roi( + in_file, + roi_mask, + standardize=standardize, + single_data_array=single_data_array, + dtype=dtype, + ) + + +def get_vol_img_from_cifti( + cifti_data: np.ndarray, axis: nib.cifti2.BrainModelAxis +) -> nib.nifti1.Nifti1Image: + """ + Extracts volume data as a volume image from CIFTI BrainModel. + + Args: + cifti_data: A 2d numpy array represents brain data in a CIFTI + file. The BrainModel(vertice/voxel) is in the 2nd dim + (dim=1). + axis: A BrainModelAxis object from a CIFTI file. + + Returns: + A nib.nifti1.Nifti1Image object. + + Raises: + TypeError: Argument axis is not a nib.cifti2.BrainModelAxis + object. 
+ """ + + if not isinstance(axis, nib.cifti2.BrainModelAxis): + raise TypeError("Argument axis is not a nib.cifti2.BrainModelAxis") + # Find volume voxels + vol_mask = axis.volume_mask + vox_indices = tuple(axis.voxel[vol_mask].T) # ([x0, x1, ...], [y0, ...], [z0, ...]) + # Extract data from volume voxels and make a 4d/3d array + cifti_data = cifti_data.T[vol_mask] # Assume brainmodels axis is last, move it to front + vol_data = np.zeros(axis.volume_shape + cifti_data.shape[1:], dtype=cifti_data.dtype) + vol_data[vox_indices] = cifti_data # "Fancy indexing" + return nib.Nifti1Image(vol_data, axis.affine) + + +def get_surf_data_from_cifti( + cifti_data: np.ndarray, + axis: nib.cifti2.BrainModelAxis, + surf_name: Literal["CIFTI_STRUCTURE_CORTEX_LEFT", "CIFTI_STRUCTURE_CORTEX_RIGHT"], +) -> np.ndarray: + """ + Extracts surface data as a numpy array from CIFTI BrainModel. + + Args: + cifti_data: A 2d numpy array represents brain data in a CIFTI + file. The BrainModel(vertice/voxel) is in the 2nd dim + (dim=1). + axis: A BrainModelAxis object from a CIFTI file. + surf_name: The surface name. + Valid: "CIFTI_STRUCTURE_CORTEX_LEFT", + "CIFTI_STRUCTURE_CORTEX_RIGHT". + + Returns: + A numpy array contains surface data. + + Raises: + TypeError: Argument axis is not a nib.cifti2.BrainModelAxis + object. + ValueError: The surface name is not valid. 
+ """ + + if not isinstance(axis, nib.cifti2.BrainModelAxis): + raise TypeError("Argument axis is not a nib.cifti2.BrainModelAxis") + if surf_name not in ["CIFTI_STRUCTURE_CORTEX_LEFT", "CIFTI_STRUCTURE_CORTEX_RIGHT"]: + raise ValueError( + f"Surface name {surf_name} is not supported.\n" + "Valid values: CIFTI_STRUCTURE_CORTEX_LEFT, CIFTI_STRUCTURE_CORTEX_RIGHT" + ) + # Loop through brain structures + # Surface name should be CIFTI_STRUCTURE_CORTEX_LEFT or CIFTI_STRUCTURE_CORTEX_RIGHT + for name, data_indices, model in axis.iter_structures(): + if name == surf_name: + # Find vertex + vtx_indices = model.vertex + # Extract data from surface vertices and make a 2d/1d array + # assume brainmodels axis is last, move it to front + cifti_data = cifti_data.T[data_indices] + surf_data = np.zeros( + (vtx_indices.max() + 1,) + cifti_data.shape[1:], dtype=cifti_data.dtype + ) + surf_data[vtx_indices] = cifti_data + + # Returned data has vertices index in the 1st dim (dim=0) + return surf_data + + +################### +# Create CIFTI file +################### + + +def make_dense_scalar( + out_file: PathLike, + left_surf_file: Optional[PathLike] = None, + right_surf_file: Optional[PathLike] = None, + left_roi_file: Optional[PathLike] = None, + right_roi_file: Optional[PathLike] = None, + volume_file: Optional[PathLike] = None, + volume_label_file: Optional[PathLike] = None, + cifti_map_name: Optional[str] = "", +) -> Path: + """Combines surface and volume data to make a CIFTI dscalar file. + + Args: + out_file: Output CIFTI dscalar file. + left_surf_file: Left surface GIFTI file. + right_surf_file: Right surface GIFTI file. + left_roi_file: Left surface mask file. + right_roi_file: Right surfce mask file. + volume_file: Volume NIFTI file. + volume_label_file: Volume structure label file. + cifti_map_name: CIFTI image map name. + + Returns: + A CIFTI dscalar file. + + Raises: + ValueError: The volume_label_file is None when volume_file is + specified. 
+ """ + + cmd = f"wb_command -disable-provenance -cifti-create-dense-scalar {out_file} " + if left_surf_file is not None: + cmd += f"-left-metric {left_surf_file} " + if left_roi_file is not None: + cmd += f"-roi-left {left_roi_file} " + if right_surf_file is not None: + cmd += f"-right-metric {right_surf_file} " + if right_roi_file is not None: + cmd += f"-roi-right {right_roi_file} " + if volume_file is not None: + if volume_label_file is not None: + cmd += f"-volume {volume_file} {volume_label_file}" + else: + raise ValueError("If volume_file is provided, volume_label_file is also required.") + print(f"Creating dense scalar file: {out_file} ...", flush=True) + run_cmd(cmd) + # Set metadata + run_cmd(f"wb_command -disable-provenance -set-map-names {out_file} -map 1 {cifti_map_name}") + return Path(out_file) + + +def make_dense_label( + out_file: PathLike, + left_surf_file: Optional[PathLike] = None, + right_surf_file: Optional[PathLike] = None, + left_roi_file: Optional[PathLike] = None, + right_roi_file: Optional[PathLike] = None, + cifti_map_name: str = "", +) -> Path: + """Combines L and R surface data to make a CIFTI dlabel file. + + Args: + out_file: Output CIFTI dlabel file. + left_surf_file: Left surface GIFTI file. + right_surf_file: Right surface GIFTI file. + left_roi_file: Left surface mask file. + right_roi_file: Right surfce mask file. + cifti_map_name: CIFTI image map name. + + Returns: + A CIFTI dlabel file. + + Raises: + ValueError: Left or right ROI file is not found. 
+ """ + + if (left_surf_file is not None) and (left_roi_file is None): + raise ValueError("If left_surf_file is provided, left_roi_file is also required.") + if (right_surf_file is not None) and (right_roi_file is None): + raise ValueError("If right_surf_file is provided, right_roi_file is also required.") + + cmd = f"wb_command -logging SEVERE -disable-provenance -cifti-create-label {out_file} " + if left_surf_file is not None: + cmd += f"-left-label {left_surf_file} -roi-left {left_roi_file} " + if right_surf_file is not None: + cmd += f"-right-label {right_surf_file} -roi-right {right_roi_file}" + print(f"Creating dense label file: {out_file} ...", flush=True) + run_cmd(cmd) + # Set metadata + run_cmd(f"wb_command -disable-provenance -set-map-names {out_file} -map 1 {cifti_map_name}") + return Path(out_file) + + +def make_dense_timeseries( + out_file: PathLike, + timestep: float, + left_surf_file: Optional[PathLike] = None, + right_surf_file: Optional[PathLike] = None, + left_roi_file: Optional[PathLike] = None, + right_roi_file: Optional[PathLike] = None, + volume_file: Optional[PathLike] = None, + volume_label_file: Optional[PathLike] = None, +) -> Path: + """Combines surface and volume data to make a CIFTI dtseries file. + + Args: + out_file: Output CIFTI dtseries file. + timestep: Repetition time (TR). + left_surf_file: Left surface GIFTI file. + right_surf_file: Right surface GIFTI file. + left_roi_file: Left surface mask file. + right_roi_file: Right surfce mask file. + volume_file: Volume NIFTI file. + volume_label_file: Volume structure label file. + + Returns: + A CIFTI dtseries file. + + Raises: + ValueError: The volume_label_file is None when volume_file is + specified. 
+ """ + + cmd = ( + "wb_command -disable-provenance -cifti-create-dense-timeseries " + f"{out_file} -timestep {timestep} " + ) + if left_surf_file is not None: + cmd += f"-left-metric {left_surf_file} " + if left_roi_file is not None: + cmd += f"-roi-left {left_roi_file} " + if right_surf_file is not None: + cmd += f"-right-metric {right_surf_file} " + if right_roi_file is not None: + cmd += f"-roi-right {right_roi_file} " + if volume_file is not None: + if volume_label_file is not None: + cmd += f"-volume {volume_file} {volume_label_file}" + else: + raise ValueError("If volume_file is provided, volume_label_file is also required.") + print(f"Creating dense timeseries file: {out_file} ...", flush=True) + run_cmd(cmd) + return Path(out_file) + + +####################### +# Manipulate CIFTI file +####################### + + +def concat_dtseries(file_list: list[PathLike], out_file: PathLike) -> Path: + """Concatenates a list of CIFTI dtseries files to a single file. + + Args: + file_list: A list of CIFTI dtseries files. + out_file: Output CIFTI dtseries file. + + Returns: + A CIFTI dtseries file. + """ + + cmd = f"wb_command -disable-provenance -cifti-merge {out_file} " + for i in file_list: + cmd += f"-cifti {i} " + run_cmd(cmd) + return Path(out_file) + + +def concat_dscalar(file_list: list[PathLike], out_file: PathLike) -> Path: + """Concatenates a list of CIFTI dscalar files to a single file. + + Args: + file_list: A list of CIFTI dscalar files. + out_file: Output CIFTI dscalar file. + + Returns: + A CIFTI dscalar file. + """ + return concat_dtseries(file_list, out_file) + + +def split_dtseries( + in_file: PathLike, + left_surf_out_file: Optional[PathLike] = None, + right_surf_out_file: Optional[PathLike] = None, + volume_out_file: Optional[PathLike] = None, + volume_mask_out_file: Optional[PathLike] = None, +) -> list[Optional[Path]]: + """ + Splits a CIFTI dtseries file to GIFTI and NIFTI files. + + Args: + left_surf_out_file: Left surface output GIFTI file. 
+ right_surf_out_file: Right surface output GIFTI file. + volume_out_file: Volume output file. + volume_mask_out_file: Volume structure label output file. + + Returns: + A list of files. The files are in the following order: left + surface GIFTI, right surface GIFTI, and subcortical NIFTI + (Optional: subcortrical mask NIFTI). Nonexistent part is None. + + Raises: + ValueError: None of the output file is specified. + """ + + if ( + (left_surf_out_file is None) + and (right_surf_out_file is None) + and (volume_out_file is None) + ): + raise ValueError("At least one output file is required.") + out_file = [None, None, None, None] + lh_cmd, rh_cmd, volume_cmd, volume_mask_cmd = "", "", "", "" + if left_surf_out_file is not None: + lh_cmd = f"-metric CORTEX_LEFT {left_surf_out_file}" + out_file[0] = Path(left_surf_out_file) + if right_surf_out_file is not None: + rh_cmd = f"-metric CORTEX_RIGHT {right_surf_out_file}" + out_file[1] = Path(right_surf_out_file) + if volume_out_file is not None: + volume_cmd = f"-volume-all {volume_out_file}" + out_file[2] = Path(volume_out_file) + if volume_mask_out_file is not None: + volume_mask_cmd = f"-roi {volume_mask_out_file}" + out_file[3] = Path(volume_mask_out_file) + cmd = ( + f"wb_command -disable-provenance -cifti-separate {in_file} COLUMN " + f"{lh_cmd} {rh_cmd} {volume_cmd} {volume_mask_cmd}" + ) + run_cmd(cmd) + return out_file + + +def split_dscalar( + in_file: PathLike, + left_surf_out_file: Optional[PathLike] = None, + right_surf_out_file: Optional[PathLike] = None, + volume_out_file: Optional[PathLike] = None, + volume_mask_out_file: Optional[PathLike] = None, +) -> list[Optional[Path]]: + """ + Splits a CIFTI dscalar file to GIFTI and NIFTI files. + + Args: + left_surf_out_file: Left surface output GIFTI file. + right_surf_out_file: Right surface output GIFTI file. + volume_out_file: Volume output file. + volume_mask_out_file: Volume structure label output file. + + Returns: + A list of files. 
The files are in the following order: left + surface GIFTI, right surface GIFTI, and subcortical NIFTI + (Optional: subcortrical mask NIFTI). Nonexistent part is None. + + Raises: + ValueError: None of the output file is specified. + """ + + out_file = split_dtseries( + in_file, + left_surf_out_file=left_surf_out_file, + right_surf_out_file=right_surf_out_file, + volume_out_file=volume_out_file, + volume_mask_out_file=volume_mask_out_file, + ) + return out_file + + +def extract_dscalar_map(in_file: PathLike, out_file: PathLike, mapname: str) -> Path: + """ + Extracts a map from a CIFTI dscalar file based on map name. + + Args: + in_file: CIFTI dscalar file. + out_file: Output CIFTI dscalar file. + mapname: Name of the to be extracted map. + + Returns: + A CIFTI dscalar file contains required map. + + Raises: + ValueError: Map name is not found in the input CIFTI file. + """ + + # Get map names from input file + name_list = get_dscalar_map_name(in_file) + if mapname not in name_list: + raise ValueError(f"Map '{mapname}' is not found in {in_file}.") + # Extract selected map + idx = name_list.index(mapname) + 1 + cmd = ( + f"wb_command -disable-provenance -cifti-math 'a' {out_file} " + f"-var 'a' {in_file} -select 1 {idx}" + ) + run_cmd(cmd, print_output=False) + # Set proper map name + set_dscalar_map_name(out_file, mapname, 1) + return Path(out_file) + + +def get_dscalar_map_name(in_file: PathLike) -> list[str]: + """ + Gets map name from a CIFTI dscalar file. + + Args: + in_file: CIFTI dscalar file. + + Returns: + A list of map names of the input dscalar file. + """ + + cmd = ["wb_command", "-file-information", in_file, "-only-map-names"] + map_name = run_cmd(cmd, print_output=False).stdout.split() + return map_name + + +def set_dscalar_map_name(in_file: PathLike, map_name: str, map_index: int) -> Path: + """ + Sets CIFTI dscalar map name. + + Args: + in_file: CIFTI dscalar file. + map_name: Map name to be set. + map_index: Index of the dscalar map. 
+ + Returns: + A CIFTI dscalar file. This is an inplace operation and the + output filename is the same as the input. + """ + + cmd = ( + f"wb_command -disable-provenance -set-map-names {in_file} " + f"-map {str(map_index)} {map_name}" + ) + run_cmd(cmd) + return Path(in_file) diff --git a/pantheon/image/gifti.py b/pantheon/image/gifti.py new file mode 100644 index 0000000..433fb38 --- /dev/null +++ b/pantheon/image/gifti.py @@ -0,0 +1,353 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Functions relate to GIFTI file manipulation.""" + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: + +from typing import Optional, Union, Literal, Any +from pathlib import Path +import numpy as np +import nibabel as nib + +from ..utils.shell import run_cmd +from ..utils.typing import PathLike + + +################### +# Create GIFTI file +################### + + +def make_gifti_image( + data: Union[np.ndarray, list[np.ndarray]], + structure: str, + intent: str = "NIFTI_INTENT_NONE", + datatype: str = "NIFTI_TYPE_FLOAT32", + kw_args_image: dict[str, Any] = {}, + kw_args_darray: dict[str, Any] = {}, +) -> nib.gifti.gifti.GiftiImage: + """ + Makes GIFTI image. + + Args: + data: Data array in GIFTI file. It could be a numpy array or a + list of numpy array. + structure: Primary structure metadata of GIFTI image. Usually, + it could be Cortex_Left or Cortex_Right. + intent: GIFTI file intent type. + datatype: GIFTI file data type. + kw_args_image: Additional keyword arguments pass to nibabel + GiftiImage class. Image metadata should be stored in an item + which key is 'meta'. + kw_args_darray: Additional keyword arguments pass to nibabel + GiftiDataArray class. Data array metadata should be stored + in an item which key is 'meta'. + + Returns: + A nib.gifti.gifti.GiftiImage object. 
+ """ + + # Conform input + data = [data] if not isinstance(data, list) else data + # Set image metadata + if "meta" in kw_args_image.keys(): + meta_dict = kw_args_image["meta"].metadata + meta_dict.update({"AnatomicalStructurePrimary": structure}) + else: + meta_dict = {"AnatomicalStructurePrimary": structure} + kw_args_image["meta"] = nib.gifti.gifti.GiftiMetaData().from_dict(meta_dict) + # Initialize GIFTI image object + img = nib.gifti.gifti.GiftiImage(**kw_args_image) + # Add data array + for da in data: + da = nib.gifti.gifti.GiftiDataArray( + data=da, intent=intent, datatype=datatype, **kw_args_darray + ) + img.add_gifti_data_array(da) + return img + + +def make_gifti_label_image( + data: Union[np.ndarray, list[np.ndarray]], + structure: str, + label: dict[str, dict[str, Union[str, int, float]]], + kw_args_image: dict[str, Any] = {}, + kw_args_darray: dict[str, Any] = {}, +) -> nib.gifti.gifti.GiftiImage: + """ + Makes GIFTI label image. + + Args: + data: Data array in GIFTI file. It could be a numpy array or a + list of numpy array. + structure: Primary structure metadata of GIFTI image. Usually, + it could be CortexLeft or CortexRight. + label: Lookup table of the labels. It should be a dict in + the format of {label_name: {key:key, red:value, green:value, + blue:value, alpha:value}}. + kw_args_image: Additional keyword arguments pass to nibabel + GiftiImage class. Image metadata should be stored in an item + which key is 'meta'. + kw_args_darray: Additional keyword arguments pass to nibabel + GiftiDataArray class. Data array metadata should be stored + in an item which key is 'meta'. + + Returns: + A nib.gifti.gifti.GiftiImage object. 
+ """ + + # Conform input + data = [data] if not isinstance(data, list) else data + # Set image metadata + if "meta" in kw_args_image.keys(): + meta_dict = kw_args_image["meta"].metadata + meta_dict.update({"AnatomicalStructurePrimary": structure}) + else: + meta_dict = {"AnatomicalStructurePrimary": structure} + kw_args_image["meta"] = nib.gifti.gifti.GiftiMetaData().from_dict(meta_dict) + # Initialize GIFTI image object + img = nib.gifti.gifti.GiftiImage(**kw_args_image) + # Set LabelTable + label_table = nib.gifti.gifti.GiftiLabelTable() + for label_name, attr in label.items(): + label = nib.gifti.gifti.GiftiLabel(**attr) + label.label = label_name + label_table.labels.append(label) + img.labeltable = label_table + # Add data array + for da in data: + da = nib.gifti.gifti.GiftiDataArray( + data=da, intent="NIFTI_INTENT_LABEL", datatype="NIFTI_TYPE_INT32", **kw_args_darray + ) + img.add_gifti_data_array(da) + return img + + +def sanitize_gii_metadata( + in_file: PathLike, + out_file: PathLike, + gim_atr: dict[str, str] = {}, + gim_meta: dict[str, str] = {}, + da_atr: dict[str, str] = {}, + da_meta: dict[str, str] = {}, + clean_provenance: bool = False, +) -> Path: + """ + Cleanup metadata and validate GIFTI file. + + Args: + in_file: GIFTI file. + out_file: Output GIFTI file. + gim_atr: GIFTI image attribute. It could be used for adding new + attributes or modifying existed ones. + gim_meta: GIFTI image metadata. It could be used for adding new + attributes or modifying existed ones. + da_atr: GIFTI data array attribute. It could be used for adding + new attributes or modifying existed ones. + da_meta: GIFTI data array metadata. It could be used for adding + new attributes or modifying existed ones. + clean_provenance: Remove provenance data (usually generated by + HCP Workbench). + + Returns: + A GIFTI file. This is an inplace operation and the output + filename is the same as the input. 
+ """ + + # Fix Gifti image metadata `Version` + # When gifti file is processed by wb_command, the field `version` + # in Gifti image metadata will be set to 1. This will cause error + # when loading data to Freeview. Fix by setting it to 1.0. + sanitize_cmd = [ + "gifti_tool", + "-infile", + in_file, + "-write_gifti", + out_file, + "-mod_gim_atr", + "Version", + "1.0", + ] + # Replace user specified fields + if gim_atr: + for key, value in gim_atr.items(): + sanitize_cmd += ["-mod_gim_atr", key, value] + if gim_meta: + for key, value in gim_meta.items(): + sanitize_cmd += ["-mod_gim_meta", key, value] + if da_atr: + for key, value in da_atr.items(): + sanitize_cmd += ["-mod_DA_atr", key, value] + if da_meta: + for key, value in da_meta.items(): + sanitize_cmd += ["-mod_DA_meta", key, value] + run_cmd(sanitize_cmd) + # Cleanup provenance metadata added by wb_command + if clean_provenance: + wb_gim_meta = { + "ProgramProvenance": "", + "Provenance": "", + "WorkingDirectory": "", + } + for key, value in wb_gim_meta.items(): + sanitize_cmd += ["-mod_gim_meta", key, value] + # Verify output gifti file + run_cmd(["gifti_tool", "-infile", out_file, "-gifti_test"]) + return Path(out_file) + + +####################### +# Manipulate GIFTI file +####################### + + +def resample_surface( + surf_file: PathLike, + current_sphere_file: PathLike, + target_sphere_file: PathLike, + out_file: PathLike, +) -> Path: + """Resamples surface mesh to target space. + + Args: + surf_file: Surface mesh file. (e.g., native mesh in native + space) + current_sphere_file: Sphere surface file with the mesh that the + surf_file is currently on. (e.g., native mesh in fsLR space) + target_sphere_file: Sphere surface file that is in register with + current_sphere_file and has the desired output mesh. (e.g., + 164k mesh in fsLR space) + out_file: Output surface mesh file. + + Returns: + A resampled surface mesh file. 
+ """ + + # Resample + run_cmd( + f"wb_command -disable-provenance -surface-resample {surf_file} " + f"{current_sphere_file} {target_sphere_file} BARYCENTRIC {out_file}" + ) + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file) + + return Path(out_file) + + +def resample_metric( + metric_file: PathLike, + current_sphere_file: PathLike, + target_sphere_file: PathLike, + out_file: PathLike, + current_area_surf_file: Optional[PathLike] = None, + target_area_surf_file: Optional[PathLike] = None, + roi_file: Optional[PathLike] = None, + resample_method: Literal["ADAP_BARY_AREA", "BARYCENTRIC"] = "ADAP_BARY_AREA", +) -> Path: + """Resamples surface metric to target space. + + Args: + metric_file: Surface metric file. (e.g., native sulc in native + space) + current_sphere_file: Sphere surface file with the mesh that the + surf_file is currently on. (e.g., native mesh in fsLR space) + target_sphere_file: Sphere surface file that is in register with + current_sphere_file and has the desired output mesh. (e.g., + 164k mesh in fsLR space) + out_file: Output surface metric file. + current_area_surf_file: Surface used for vertex area correction. + The mesh of this surface should match current_sphere_file. + (e.g., midthickness in native space with native mesh) + target_area_surf_file: Surface used for vertex area correction. + The mesh of this surface should match target_sphere_file. + (e.g., midthickness in fsLR space with 164k mesh) + roi_file: Surface mask file applies to current_sphere_file. + resample_method: Resample method. ADAP_BARY_AREA or BARYCENTRIC. + + Returns: + A resampled surface metric file. + + Raises: + ValueError: Unrecognized resample method. + """ + + # Parse resample method + if resample_method not in ["ADAP_BARY_AREA", "BARYCENTRIC"]: + raise ValueError("Unrecognized resample method. 
Valid: ADAP_BARY_AREA, BARYCENTRIC.") + + # Resample + cmd = ( + f"wb_command -disable-provenance -metric-resample " + f"{metric_file} {current_sphere_file} {target_sphere_file} " + f"{resample_method} {out_file} " + ) + if resample_method == "ADAP_BARY_AREA": + cmd += f"-area-surfs {current_area_surf_file} {target_area_surf_file} " + if resample_method == "BARYCENTRIC": + cmd += f"-largest " + if roi_file: + cmd += f"-current-roi {roi_file}" + run_cmd(cmd) + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file) + + return Path(out_file) + + +def resample_label( + label_file: PathLike, + current_sphere_file: PathLike, + target_sphere_file: PathLike, + out_file: PathLike, + current_area_surf_file: Optional[PathLike] = None, + target_area_surf_file: Optional[PathLike] = None, + resample_method: Literal["ADAP_BARY_AREA", "BARYCENTRIC"] = "ADAP_BARY_AREA", +) -> Path: + """Resamples surface label to target space. + + Args: + label_file: Surface label file. (e.g., native aparc in native + space) + current_sphere_file: Sphere surface file with the mesh that the + surf_file is currently on. (e.g., native mesh in fsLR space) + target_sphere_file: Sphere surface file that is in register with + current_sphere_file and has the desired output mesh. (e.g., + 164k mesh in fsLR space) + out_file: Output surface label file. + current_area_surf_file: Surface used for vertex area correction. + The mesh of this surface should match current_sphere_file. + (e.g., midthickness in native space with native mesh) + target_area_surf_file: Surface used for vertex area correction. + The mesh of this surface should match target_sphere_file. + (e.g., midthickness in fsLR space with 164k mesh) + resample_method: Resample method. ADAP_BARY_AREA or BARYCENTRIC. + + Returns: + A resampled surface label file. + + Raises: + ValueError: Unrecognized resample method. 
+ """ + + # Parse resample method + if resample_method not in ["ADAP_BARY_AREA", "BARYCENTRIC"]: + raise ValueError("Unrecognized resample method. Valid: ADAP_BARY_AREA, BARYCENTRIC.") + + # Resample + cmd = ( + f"wb_command -disable-provenance -label-resample " + f"{label_file} {current_sphere_file} {target_sphere_file} " + f"{resample_method} {out_file} " + ) + if resample_method == "ADAP_BARY_AREA": + cmd += f"-area-surfs {current_area_surf_file} {target_area_surf_file}" + if resample_method == "BARYCENTRIC": + cmd += f"-largest " + run_cmd(cmd) + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file) + + return Path(out_file) diff --git a/pantheon/image/nifti.py b/pantheon/image/nifti.py new file mode 100644 index 0000000..8f3c623 --- /dev/null +++ b/pantheon/image/nifti.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Functions relate to NIFTI file manipulation.""" + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: diff --git a/pantheon/masking/__init__.py b/pantheon/masking/__init__.py new file mode 100644 index 0000000..a5682fb --- /dev/null +++ b/pantheon/masking/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- \ No newline at end of file diff --git a/pantheon/masking/roi.py b/pantheon/masking/roi.py new file mode 100644 index 0000000..7c4c329 --- /dev/null +++ b/pantheon/masking/roi.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Functions relate to ROI manipulation.""" + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: + +from __future__ import annotations +from typing import Union, Optional, Literal, Any +import logging +import numpy as np +import nibabel as nib +import nilearn.masking as nlm + +from ..image.cifti import decompose_dlabel +from ..utils.validation import parse_roi_id +from ..utils.typing import PathLike + + +def make_mask_from_index( + data: np.ndarray, index_list: list[int], dtype: Literal["int", "bool"] = "int" 
+) -> np.ndarray: + """Makes a binary mask array from a list of index values. + + Args: + data: Input data array. Data type of this array should be int. + index_list: A list of int. Each value in this list selects + elements in data which match this value. + dtype: Data type of the returned array. Default is int. If the + returned array is used as a mask to index other arrays, it + should be in bool type. + + Returns: + A binary numpy array with same shape as data. Elements in + data match any value in index_list are 1 in this array. And all + other elements are 0. + + Raises: + TypeError: data or index_list doesn't have the correct + type. + ValueError: dtype is not int or bool. + """ + + if not np.issubdtype(data.dtype, np.integer): + raise TypeError(f"Argument data should have int data type.") + if not (isinstance(index_list, list) and all(isinstance(i, int) for i in index_list)): + raise TypeError("Argument index_list should be a list of int.") + if dtype not in ["int", "bool"]: + raise ValueError(f"Argument dtype is {dtype}. Valid: int, bool.") + + mask = np.zeros_like(data) + for idx in index_list: + mask += np.where(data == idx, 1, 0) + mask = np.where(mask > 0, 1, 0).astype(dtype) + return mask + + +def make_roi_from_spec( + roi_id: str, + roi_spec: dict[str, dict[str, Any]], + atlas_file: list[Optional[PathLike]], +) -> dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]: + """Makes ROI mask based on given ROI specification. + + Args: + roi_id: ROI name. + roi_spec: ROI specification. Usually it generates by the + 'read_roi_spec' function. + atlas_file: Atlas files used for creating ROI. It should be a + list of filenames corresponding to left and right brain + hemisphere. Either file could be None. Both hemispheres + could be the same file. + + Returns: + A dict with 3 items. The keys are SurfaceL, SurfaceR and + Volume, corresponding to the left, right brain hemisphere + and the volume part. 
Usually a ROI could be either in + surface or volume format, but not both. + Surface mask is represented in a numpy array. Volume mask is + represented in a nib.nifti1.Nifti1Image image. + + Raises: + TypeError: atlas_file is not a list. + ValueError: File of requested hemisphere is None in atlas_file. + ValueError: ROIType field of the ROI specification is invalid. + """ + + if not isinstance(atlas_file, list): + raise TypeError( + "Argument atlas_file should be a list of two filenames " + "correspoding to L and R hemispheres. (could be None or same file)" + ) + roi_id, hemi = parse_roi_id(roi_id) + if (hemi == "L" or hemi == "LR") and (atlas_file[0] is None): + raise ValueError("Atlas file of left hemisphere is None.") + if (hemi == "R" or hemi == "LR") and (atlas_file[1] is None): + raise ValueError("Atlas file of right hemisphere is None.") + + spec = roi_spec[roi_id] + roi_type = spec["ROIType"] + roi_mask = {"Volume": None, "SurfaceL": None, "SurfaceR": None} + # Volume + if roi_type == "Volume": + # load atlas data + atlas_data = [None, None] + for i in [0, 1]: + if atlas_file[i]: + # for mask image creation, assume both atlas images have same header + atlas_img = nib.load(atlas_file[i]) + if not isinstance(atlas_img, nib.nifti1.Nifti1Image): + raise ValueError("For volume ROI, only support NIFTI atlas file.") + atlas_data[i] = atlas_img.get_fdata().astype(np.int16) + # make a mask from index + if hemi == "L": + mask = make_mask_from_index(atlas_data[0], spec["IndexL"]) + if hemi == "R": + mask = make_mask_from_index(atlas_data[1], spec["IndexR"]) + if hemi == "LR": + mask_lh = make_mask_from_index(atlas_data[0], spec["IndexL"]) + mask_rh = make_mask_from_index(atlas_data[1], spec["IndexR"]) + mask = mask_lh + mask_rh + mask = np.where(mask > 0, 1, 0) + mask_img = nib.Nifti1Image(mask, atlas_img.affine, atlas_img.header) + roi_mask["Volume"] = mask_img + # Surface (GIFTI, CIFTI) + elif roi_type == "Surface": + logging.disable(logging.CRITICAL) # avoid CIFTI 
reading warning + # load atlas data + atlas_data = [None, None] + for i, part in enumerate(["SurfaceL", "SurfaceR"]): + if atlas_file[i]: + atlas_img = nib.load(atlas_file[i]) + # GIFTI + if isinstance(atlas_img, nib.gifti.gifti.GiftiImage): + atlas_data[i] = atlas_img.agg_data().astype(np.int16) + # CIFTI + elif isinstance(atlas_img, nib.cifti2.Cifti2Image): + atlas_data[i] = decompose_dlabel(atlas_img, dtype=np.int16)[part] + else: + raise ValueError("For surface ROI, only support GIFTI or CIFTI atlas file.") + logging.disable(logging.NOTSET) + # make a mask from index + # for CIFTI file, the vertex is in the 2nd dimension + # use squeeze to make a 1d array as GIFTI file + if hemi == "L": + roi_mask["SurfaceL"] = make_mask_from_index(np.squeeze(atlas_data[0]), spec["IndexL"]) + if hemi == "R": + roi_mask["SurfaceR"] = make_mask_from_index(np.squeeze(atlas_data[1]), spec["IndexR"]) + if hemi == "LR": + roi_mask["SurfaceL"] = make_mask_from_index(np.squeeze(atlas_data[0]), spec["IndexL"]) + roi_mask["SurfaceR"] = make_mask_from_index(np.squeeze(atlas_data[1]), spec["IndexR"]) + else: + raise ValueError( + f"Invalid ROIType of {roi_id} in ROI specification. Valid: Volume, Surface." + ) + + return roi_mask + + +def unmask( + data: np.ndarray, + roi_mask: dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]], +) -> dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]: + """Reshapes ROI data back into its original shape. + + Args: + data: Any data (a numpy array) generates by the custom ROI data + reading function in this package + (e.g., read_preproc_func_cifti_roi). + roi_mask: ROI mask dict used to read the data. + + Returns: + A dict with 3 items contains the unmasked data. The keys are + SurfaceL, SurfaceR and Volume, corresponding to the left, right + brain hemisphere and the volume part. + Surface mask is represented in a numpy array. Volume mask is + represented in a nib.nifti1.Nifti1Image image. + + Raises: + ValueError: data is not a 1-d or 2d numpy array. 
+ ValueError: roi_mask is invalid. + """ + + # Check data is a 1-d or 2-d data array + if not (isinstance(data, np.ndarray) and data.ndim <= 2): + raise ValueError("Argument data could only be a 1-d or 2-d numpy array.") + # Check there's no surface part in roi_mask when it has volume part + if (roi_mask["Volume"] is not None) and ( + (roi_mask["SurfaceL"] is not None) or (roi_mask["SurfaceR"] is not None) + ): + raise ValueError( + "Argument roi_mask is invalid. " + "It should be generated by the function 'mask_roi_from_spec'." + ) + # Check there's at least one surface part when there's no surface part + if ( + (roi_mask["Volume"] is None) + and (roi_mask["SurfaceL"] is None) + and (roi_mask["SurfaceR"] is None) + ): + raise ValueError( + "Argument roi_mask is invalid. " + "It should be generated by the function 'mask_roi_from_spec'." + ) + + data_um = {"SurfaceL": None, "SurfaceR": None, "Volume": None} + if roi_mask["Volume"] is not None: + data_um["Volume"] = nlm.unmask(data, roi_mask["Volume"]) + else: + # Concatenate L and R surface mask if it's a bilateral ROI + if (roi_mask["SurfaceL"] is not None) and (roi_mask["SurfaceR"] is not None): + mask = np.hstack((roi_mask["SurfaceL"], roi_mask["SurfaceR"])) + elif roi_mask["SurfaceL"] is not None: + mask = roi_mask["SurfaceL"] + elif roi_mask["SurfaceR"] is not None: + mask = roi_mask["SurfaceR"] + # Set data back to roi_mask shape using fancy indexing + if data.ndim == 2: + data_surf = np.zeros((data.shape[0], mask.shape[0]), dtype=data.dtype) + data_surf[:, mask.astype("bool")] = data + elif data.ndim == 1: + data_surf = np.zeros((mask.shape[0]), dtype=data.dtype) + data_surf[mask.astype("bool")] = data + # Split unmasked L and R hemisphere data + if (roi_mask["SurfaceL"] is not None) and (roi_mask["SurfaceR"] is not None): + # assure lh data is always before rh data in a combined ROI + n_vertex_lh = roi_mask["SurfaceL"].shape[0] + if data.ndim == 2: + data_um["SurfaceL"] = data_surf[:, :n_vertex_lh] + 
data_um["SurfaceR"] = data_surf[:, n_vertex_lh:] + if data.ndim == 1: + data_um["SurfaceL"] = data_surf[:n_vertex_lh] + data_um["SurfaceR"] = data_surf[n_vertex_lh:] + elif roi_mask["SurfaceL"] is not None: + data_um["SurfaceL"] = data_surf + elif roi_mask["SurfaceR"] is not None: + data_um["SurfaceR"] = data_surf + + return data_um diff --git a/pantheon/plotting/__init__.py b/pantheon/plotting/__init__.py new file mode 100644 index 0000000..a5682fb --- /dev/null +++ b/pantheon/plotting/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- \ No newline at end of file diff --git a/pantheon/plotting/plotly.py b/pantheon/plotting/plotly.py new file mode 100644 index 0000000..a783a36 --- /dev/null +++ b/pantheon/plotting/plotly.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Plotting function for plotly.""" + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: + +from __future__ import annotations +from typing import Optional, Union + +from ..utils.typing import PlotlyFigure + + +def apply_style( + fig: PlotlyFigure, + height: Optional[Union[int, float]] = None, + width: Optional[Union[int, float]] = None, + base_font_size: int = 16, + font_family: str = "Arial", + font_color: str = "black", + showline: bool = True, + showgrid: bool = True, + showtitle_x: bool = True, + showtitle_y: bool = True, + showlegend: bool = True, + showlegend_title: bool = False, + template: str = "plotly_white", +) -> PlotlyFigure: + """Applys custom plotly style to figure. + + Args: + fig: A Ploty Figure. + height: Figure height. If it's None, use a aspect ratio 1.618. + width: Figure width. If it's None, use a aspect ratio 1.618. + base_font_size: Base font size. + font_family: Font typeface. + font_color: Font color. + showline: Show axis line. Default: True. + showgrid: Show grid in figure. Default: True. + showtitle_x: Show x axis title. Default: True. + showtitle_y: Show y axis title. Default: True. 
+ showlegend: Show figure legend. Default: True. + showlegend_title: Show legned title. Default: False. + template: Plotly default figure style template. + + Returns: + A plotly Figure object. + """ + + # use plotly theme + if template: + fig.update_layout(template=template) + # figure size + if height or width: + # use aspect ratio 1.618 if only one dimension is set + if height is None: + height = width / 1.618 + if width is None: + width = height * 1.618 + fig.update_layout(autosize=False, height=height, width=width) + # font color + fig.update_layout(title_font_color=font_color, legend_font_color=font_color) + fig.update_xaxes(dict(title_font_color=font_color, tickfont_color=font_color)) + fig.update_yaxes(dict(title_font_color=font_color, tickfont_color=font_color)) + # font size + if base_font_size: + fig.update_layout(font_size=base_font_size) + # axes + fig.update_xaxes( + dict( + title_font=dict(size=int(base_font_size * 1.25)), + tickfont=dict(size=base_font_size), + ) + ) + fig.update_yaxes( + dict( + title_font=dict(size=int(base_font_size * 1.25)), + tickfont=dict(size=base_font_size), + ) + ) + # title + fig.update_layout(title=dict(font=dict(size=int(base_font_size * 1.5)))) + # legend + fig.update_layout(legend=dict(font=dict(size=base_font_size))) + # annotation + fig.update_annotations(font=dict(size=base_font_size)) + # font family + if font_family: + fig.update_layout(font_family=font_family) + # show axis line + fig.update_xaxes(showline=showline, linewidth=2, linecolor="black", ticks="outside") + fig.update_yaxes(showline=showline, linewidth=2, linecolor="black", ticks="outside") + # show background grid + fig.update_xaxes(showgrid=showgrid) + fig.update_yaxes(showgrid=showgrid) + # hide x, y axis title + if not showtitle_x: + fig.update_xaxes(title_text="") + if not showtitle_y: + fig.update_yaxes(title_text="") + # show figure legend + fig.update_layout(showlegend=showlegend) + # hide figure legend title + if not showlegend_title: + 
fig.update_layout(legend_title_text="") + + return fig diff --git a/pantheon/preprocess/__init__.py b/pantheon/preprocess/__init__.py new file mode 100644 index 0000000..a5682fb --- /dev/null +++ b/pantheon/preprocess/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- \ No newline at end of file diff --git a/pantheon/preprocess/bold.py b/pantheon/preprocess/bold.py new file mode 100644 index 0000000..92de28f --- /dev/null +++ b/pantheon/preprocess/bold.py @@ -0,0 +1,307 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: +# + +from __future__ import annotations +from typing import Optional, Union +from pathlib import Path +import shutil +import tempfile +import numpy as np + +from ..image.gifti import sanitize_gii_metadata +from ..utils.shell import run_cmd +from ..utils.typing import PathLike + + +def find_good_voxel( + func_file: PathLike, + ribbon_file: PathLike, + out_file: PathLike, + neighborhood_smoothing: Union[float, int] = 5, + ci_limit: Union[float, int] = 0.5, + debug: bool = False, +) -> Path: + """Finds good voxels based on coefficient of variation. + + Args: + func_file: Functional image file. + ribbon_file: Cortex ribbon file. + out_file: Output good voxels mask file. + neighborhood_smoothing: Spatial smoothing kernal sigma (mm). + ci_limit: Parameter to control the good voxel threshold. Smaller + value relates to stricter threshold. + debug: If true, output intermediate files to out_dir. + + Returns: + A good voxels mask file. 
+ """ + + with tempfile.TemporaryDirectory() as tmp_dir: + + # Calculate coefficient of variation (cov) of the func data + tmean_file = Path(tmp_dir).joinpath("Mean.nii.gz") + tstd_file = Path(tmp_dir).joinpath("SD.nii.gz") + cov_file = Path(tmp_dir).joinpath("cov.nii.gz") + run_cmd(f"fslmaths {func_file} -Tmean {tmean_file} -odt float") + run_cmd(f"fslmaths {func_file} -Tstd {tstd_file} -odt float") + run_cmd(f"fslmaths {tstd_file} -div {tmean_file} {cov_file}") + + # Calculate modulated cov within cortical ribbon + cov_ribbon_file = Path(tmp_dir).joinpath("cov_ribbon.nii.gz") + cov_ribbon_norm_file = Path(tmp_dir).joinpath("cov_ribbon_norm.nii.gz") + sm_norm_file = Path(tmp_dir).joinpath("SmoothNorm.nii.gz") + cov_ribbon_norm_sm_file = Path(tmp_dir).joinpath("cov_ribbon_norm_smooth.nii.gz") + cov_norm_modulate_file = Path(tmp_dir).joinpath("cov_norm_modulate.nii.gz") + cov_norm_modulate_ribbon_file = Path(tmp_dir).joinpath("cov_norm_modulate_ribbon.nii.gz") + run_cmd(f"fslmaths {cov_file} -mas {ribbon_file} {cov_ribbon_file}") + res = run_cmd(f"fslstats {cov_ribbon_file} -M", print_output=False) + cov_ribbon_mean = float(res.stdout) + run_cmd(f"fslmaths {cov_ribbon_file} -div {cov_ribbon_mean} " f"{cov_ribbon_norm_file}") + run_cmd(f"fslmaths {cov_ribbon_norm_file} -bin -s {neighborhood_smoothing} {sm_norm_file}") + run_cmd( + f"fslmaths {cov_ribbon_norm_file} -s {neighborhood_smoothing} " + f"-div {sm_norm_file} -dilD {cov_ribbon_norm_sm_file}" + ) + run_cmd( + f"fslmaths {cov_file} -div {cov_ribbon_mean} -div " + f"{cov_ribbon_norm_sm_file} {cov_norm_modulate_file}" + ) + run_cmd( + f"fslmaths {cov_norm_modulate_file} -mas {ribbon_file} {cov_norm_modulate_ribbon_file}" + ) + + # Make good voxel mask + mask_file = Path(tmp_dir).joinpath("mask.nii.gz") + res = run_cmd(f"fslstats {cov_norm_modulate_ribbon_file} -M", print_output=False) + ribbon_mean = float(res.stdout) + res = run_cmd(f"fslstats {cov_norm_modulate_ribbon_file} -S", print_output=False) + ribbon_std 
= float(res.stdout) + ribbon_upper = ribbon_mean + ribbon_std * ci_limit + print(f"Good voxel threshold for {Path(func_file).name}: {ribbon_upper}") + run_cmd(f"fslmaths {tmean_file} -bin {mask_file}") + run_cmd( + f"fslmaths {cov_norm_modulate_file} -thr {ribbon_upper} -bin -sub " + f"{mask_file} -mul -1 -thr 1 -bin {out_file} -odt int" + ) + + # Output intermediate files if requested + if debug: + out_dir = Path(out_file).parent + suffix = suffix = Path(func_file).stem.split(".")[0] + shutil.copytree( + tmp_dir, + Path(out_dir).joinpath(f"temp_goodvoxel_{suffix}"), + dirs_exist_ok=True, + ) + + return Path(out_file) + + +def sample_volume_to_surface( + func_file: PathLike, + wm_file: PathLike, + pial_file: PathLike, + midthickness_file: PathLike, + out_file: PathLike, + vol_mask_file: Optional[PathLike] = None, + surf_mask_file: Optional[PathLike] = None, + dilate_distance: Optional[Union[float, int]] = 10, +) -> Path: + """Resamples data in volume space to surface space. + + Args: + func_file: Functional image file. + wm_file: White surface file. + pial_file: Pial surface file. + midthickness_file: Midthickness file. + out_file: Output functional image file in surface space. + vol_mask_file: Volume mask file applies to func_file. Optional. + surf_mask_file: Surface mask file applies to sampled surface + functional image file. + dilate_distance: Dilate distance (mm) applies to surface sampled + data. + + Returns: + A functional image file in surface space. 
+ """ + + # Sample from volume to surface + cmd = ( + f"wb_command -disable-provenance -volume-to-surface-mapping {func_file} " + f"{midthickness_file} {out_file} -ribbon-constrained {wm_file} {pial_file} " + ) + if vol_mask_file is not None: + cmd += f"-volume-roi {vol_mask_file}" + run_cmd(cmd) + + # Dilate mapped surface + # This step follows HCPpipeline + # This could fix some bad vertices during the mapping + if dilate_distance: + run_cmd( + f"wb_command -disable-provenance -metric-dilate {out_file} " + f"{midthickness_file} {dilate_distance} {out_file} -nearest" + ) + + # Apply surface mask to sampled data + if surf_mask_file is not None: + run_cmd( + f"wb_command -disable-provenance -metric-mask " + f"{out_file} {surf_mask_file} {out_file}" + ) + + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file) + + return Path(out_file) + + +def extract_func_subcortical( + func_std_file: PathLike, + seg_std_file: PathLike, + template_seg_file: PathLike, + out_file: PathLike, + smoothing_fwhm: Optional[Union[float, int]] = None, + debug: bool = False, +) -> Path: + """Extracts functional data in standard subcortical regions. + + Args: + func_std_file: Functional image file in standard volume space. + seg_std_file: Subcortical segmentation image file in standard + volume space. + template_seg_file: Subcortical segmentation image file in + standard volume space. This file is used to make a + segmentation label file. + out_file: Output subcortical functional image file. + smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). If + None, no spatial smoothing applies to the functional data. + Note, this operation is constrained within each subcortical + region. + debug: If true, output intermediate files to out_dir. + + Returns: + A functional image file contains only subcortical region data. + + Raises: + ValueError: func_std_file or seg_std_file is not in + MNI152NLin6Asym space (2mm). 
+ """ + + # Check input data in MNI space + if ("space-MNI152NLin6Asym_res-2" not in Path(func_std_file).name) or ( + "space-MNI152NLin6Asym_res-2" not in Path(seg_std_file).name + ): + raise ValueError("Input data and segmentation should be in MNI152NLin6Asym space (2mm). ") + + with tempfile.TemporaryDirectory() as tmp_dir: + + # Create template subcortical dense label (in MNI152NLin6Asym space) + roi_template_file = Path(tmp_dir).joinpath("template_roi.dlabel.nii") + run_cmd( + f"wb_command -disable-provenance -cifti-create-label {roi_template_file} " + f"-volume {template_seg_file} {template_seg_file}" + ) + + # Create dense timeseries using subject subcortical ROI (in MNI152NLin6Asym space) + func_file = Path(tmp_dir).joinpath("func.dtseries.nii") + run_cmd( + f"wb_command -disable-provenance -cifti-create-dense-timeseries {func_file} " + f"-volume {func_std_file} {seg_std_file}" + ) + # Dilate out any exact zeros voxel in func_file + func_dil_file = Path(tmp_dir).joinpath("func_dil.dtseries.nii") + run_cmd( + f"wb_command -disable-provenance -cifti-dilate {func_file} " + f"COLUMN 0 30 {func_dil_file}" + ) + # Smoothing if requested + if smoothing_fwhm is not None: + sigma = convert_fwhm_to_sigma(smoothing_fwhm) + func_dil_sm_file = Path(tmp_dir).joinpath("func_dil_sm.dtseries.nii") + run_cmd( + f"wb_command -disable-provenance -cifti-smoothing {func_dil_file} " + f"0 {sigma} COLUMN {func_dil_sm_file} -fix-zeros-volume" + ) + else: + func_dil_sm_file = func_dil_file + # Resample func dense timeseries to template subcortical ROI + func_template_file = Path(tmp_dir).joinpath("func_template.dtseries.nii") + run_cmd( + f"wb_command -disable-provenance -cifti-resample {func_dil_sm_file} COLUMN " + f"{roi_template_file} COLUMN ADAP_BARY_AREA CUBIC {func_template_file} " + "-volume-predilate 10" + ) + # Dilate again to ensure no zero in standard ROIs + template_func_dil_file = Path(tmp_dir).joinpath("func_template_dil.dtseries.nii") + run_cmd( + f"wb_command 
-disable-provenance -cifti-dilate {func_template_file} COLUMN 0 30 " + f"{template_func_dil_file}" + ) + + # Save functional data in subcortical to NIFTI file + run_cmd( + f"wb_command -disable-provenance -cifti-separate {template_func_dil_file} COLUMN " + f"-volume-all {out_file}" + ) + + # Save intermediate files if requested + if debug: + out_dir = Path(out_file).parent + suffix = Path(func_std_file).stem.split(".")[0] + if smoothing_fwhm is not None: + suffix += "_sm{}".format(convert_fwhm_to_str(smoothing_fwhm)) + shutil.copytree( + tmp_dir, + Path(out_dir).joinpath(f"temp_subcortical_{suffix}"), + dirs_exist_ok=True, + ) + + return Path(out_file) + + +def make_func_map_name( + func_file: PathLike, + timestep: Union[float, int], + out_file: PathLike, + float_format: str = ":.1f", +) -> Path: + """Writes CIFTI/GIFTI map name of a file to text file. + + This functions generates a series of map name based on the input + repetition time, which could be used in a CIFTI dtseries file. + + Args: + func_file: Functional image file (NIFTI or GIFTI). This file is + used to calculate the total number of time points. + timestep: The temporal interval of consecutive time points in + the func_file. Usually it's the repetition time of the + functional image. + out_file: Output map name file. + float_format: Float number format in the map name. + + Returns: + A map name text file. 
+ """ + + nvol = int(run_cmd(f"fslnvols {func_file}", print_output=False).stdout) + mapname = "" + for i in np.arange(0, nvol * timestep, timestep): + mapname += ("{" + float_format + "} seconds\n").format(i) + Path(out_file).write_text(mapname) + return Path(out_file) + + +def convert_fwhm_to_sigma(fwhm: Union[float, int]) -> float: + """Converts smoothing FWHM to gaussian sigma.""" + return float(fwhm) / (2 * np.sqrt(2 * np.log(2))) + + +def convert_fwhm_to_str(fwhm: Union[float, int]) -> str: + """Converts smoothing FWHM to pure string representation.""" + return f"{fwhm:0.1f}".replace(".", "pt") diff --git a/pantheon/preprocess/freesurfer.py b/pantheon/preprocess/freesurfer.py new file mode 100644 index 0000000..de6fa93 --- /dev/null +++ b/pantheon/preprocess/freesurfer.py @@ -0,0 +1,458 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from __future__ import annotations +from typing import Optional, Literal +from pathlib import Path +import shutil +import tempfile + +from ..image.gifti import sanitize_gii_metadata +from ..utils.validation import parse_hemi +from ..utils.shell import run_cmd +from ..utils.typing import PathLike + + +def convert_freesurfer_geometry_surface( + sub_id: str, + hemi: Literal["L", "R"], + surf_id: str, + fs_dir: PathLike, + out_dir: PathLike, + adjust_cras: bool = True, + xfm_file: Optional[PathLike] = None, + debug: bool = False, +) -> Path: + """Converts FreeSurfer's geometry surfaces to GIFTI format. + + Args: + sub_id: SubjectID. + hemi: Brain hemisphere. + surf_id: Surface name in FreeSurfer's outputs. + fs_dir: Subject's FreeSurfer output directory. + out_dir: Directory to store output file. + adjust_cras: If true, adjust the cras offset which FreeSurfer + stores in file's header. + xfm_file: An ITK format affine transformation matrix file. If it + is given, applying it to the surface file. Optional. + debug: If true, output intermediate files to out_dir. + + Returns: + A surface mesh file in GIFTI format. 
+ + Raises: + ValueError: Unrecognized surf_id. + """ + + # Parse hemisphere + hemi, structure = parse_hemi(hemi) + + # Surface metadata + if surf_id in ["white", "wm"]: + fs_surf_id, surf_id, surf_type, surf_secondary_type = ( + "white", + "wm", + "ANATOMICAL", + "GRAY_WHITE", + ) + elif surf_id == "pial": + fs_surf_id, surf_id, surf_type, surf_secondary_type = ( + "pial", + "pial", + "ANATOMICAL", + "PIAL", + ) + elif surf_id == "sphere": + fs_surf_id, surf_id, surf_type, surf_secondary_type = ( + "sphere", + "sphere", + "SPHERICAL", + None, + ) + elif surf_id == "sphere.reg": + fs_surf_id, surf_id, surf_type, surf_secondary_type = ( + "sphere.reg", + "sphere", + "SPHERICAL", + None, + ) + else: + raise ValueError( + f"Unrecognized surface name: {surf_id} ...\n" + "Valid option: 'wm', 'pial', 'sphere', 'sphere.reg'" + ) + + # Convert surface from FreeSurfer format to GIFTI format + surf_file = Path(fs_dir).joinpath(f"sub-{sub_id}", "surf", f"{hemi.lower()}h.{fs_surf_id}") + fname = f"sub-{sub_id}_hemi-{hemi}_space-fsnative_den-fsnative_{surf_id}.surf.gii" + if fs_surf_id == "sphere.reg": + fname = fname.replace(f"space-fsnative", f"space-fsaverage") + out_file = Path(out_dir).joinpath(fname) + print(f"Converting {surf_file} ...", flush=True) + + # Set GIFTI metadata + run_cmd(f"mris_convert {surf_file} {out_file}") + set_structure_cmd = ( + f"wb_command -disable-provenance -set-structure {out_file} " + f"{structure} -surface-type {surf_type}" + ) + if surf_secondary_type: + set_structure_cmd += f" -surface-secondary-type {surf_secondary_type}" + run_cmd(set_structure_cmd) + + # Adjust CRAS if the matrix is supplied + # A note from niworkflow.interfaces.surf.NormalizeSurf: + # FreeSurfer includes an offset to the center of the brain + # volume that is not respected by all software packages. + # Normalization involves adding this offset to the coordinates + # of all vertices, and zeroing out that offset, to ensure + # consistent behavior across software packages. 
+ # In particular, this normalization is consistent with the Human + # Connectome Project pipeline (see `AlgorithmSurfaceApplyAffine` + # _ and `FreeSurfer2CaretConvertAndRegisterNonlinear`_), + # although the the HCP may not zero out the offset. + if adjust_cras: + with tempfile.TemporaryDirectory() as tmp_dir: + ref_file = Path(fs_dir).joinpath(f"sub-{sub_id}", "mri", "brain.finalsurfs.mgz") + cras_file = get_cras(ref_file, tmp_dir) + print(f"Adjusting CRAS offset: {fname} ...", flush=True) + run_cmd( + "wb_command -disable-provenance -surface-apply-affine " + f"{out_file} {cras_file} {out_file}" + ) + # Cleanup CRAS codes in GIFTI metadata + # See https://github.com/nipreps/niworkflows/blob/a2d3686bb9b184ec15e2147a3ae6f86c7e066929/niworkflows/interfaces/surf.py#L562 + # Using AFNI's gifti_tool + reset_cras_cmd = f"gifti_tool -infile {out_file} -write_gifti {out_file}" + for key in ["VolGeomC_R", "VolGeomC_A", "VolGeomC_S"]: + reset_cras_cmd += f" -mod_DA_meta {key} 0.000000" + run_cmd(reset_cras_cmd) + # Output CRAS matrix if requested + if debug: + Path(out_dir).joinpath("temp_cras").mkdir(exist_ok=True) + shutil.copy( + cras_file, + Path(out_dir).joinpath( + "temp_cras", f"sub-{sub_id}_hemi-{hemi}_{surf_id}_desc-cras_xfm.mat" + ), + ) + + # Apply affine transformation if the matrix is supplied + if xfm_file: + apply_affine_transformation_to_surface(out_file, xfm_file, out_dir, debug=debug) + + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file, gim_meta={"UserName": ""}, da_meta={"Name": ""}) + + return out_file + + +def convert_freesurfer_metric( + sub_id: str, hemi: Literal["L", "R"], metric_id: str, fs_dir: PathLike, out_dir: PathLike +) -> Path: + """Converts FreeSurfer's metric map to GIFTI format. + + Args: + sub_id: SubjectID. + hemi: Brain hemisphere. + metric_id: Surface metric name in FreeSurfer's outputs. + fs_dir: Subject's FreeSurfer output directory. + out_dir: Directory to store output file. 
+ + Returns: + A surface metric file in GIFTI format. + + Raises: + ValueError: Unrecognized metric_id. + """ + + # Parse hemisphere + hemi, structure = parse_hemi(hemi) + + # Parse metric + if metric_id in ["sulc", "curv"]: + palette_mode = "MODE_AUTO_SCALE_PERCENTAGE" + palette_options = ( + "-pos-percent 2 98 -palette-name Gray_Interp -disp-pos true " + "-disp-neg true -disp-zero true" + ) + elif metric_id == "thickness": + palette_mode = "MODE_AUTO_SCALE_PERCENTAGE" + palette_options = ( + "-pos-percent 4 96 -interpolate true -palette-name videen_style " + "-disp-pos true -disp-neg false -disp-zero false" + ) + else: + raise ValueError(f"Unrecognized metric: {metric_id} ...\n") + + # Convert metric file + metric_file = Path(fs_dir).joinpath(f"sub-{sub_id}", "surf", f"{hemi.lower()}h.{metric_id}") + wm_file = Path(fs_dir).joinpath(f"sub-{sub_id}", "surf", f"{hemi.lower()}h.white") + out_file = Path(out_dir).joinpath( + f"sub-{sub_id}_hemi-{hemi}_space-fsnative_den-fsnative_{metric_id}.shape.gii" + ) + print(f"Converting metric: {metric_file} ...", flush=True) + run_cmd(f"mris_convert -c {metric_file} {wm_file} {out_file}") + + # Set GIFTI metadata + run_cmd("wb_command -disable-provenance -set-structure " f"{out_file} {structure}") + run_cmd( + f"wb_command -disable-provenance -metric-math 'var * -1' {out_file} -var var {out_file}" + ) + run_cmd( + f"wb_command -disable-provenance -set-map-names {out_file} " + f"-map 1 sub-{sub_id}_hemi-{hemi}_{metric_id}" + ) + run_cmd( + "wb_command -disable-provenance -metric-palette " + f"{out_file} {palette_mode} {palette_options}" + ) + + # Additional step for thickness metric + # From https://github.com/Washington-University/HCPpipelines/blob/1334b35ab863540044333bbdec70a68fb19ab611/PostFreeSurfer/scripts/FreeSurfer2CaretConvertAndRegisterNonlinear.sh#L362 + if metric_id == "thickness": + run_cmd( + "wb_command -disable-provenance -metric-math 'abs(thickness)' " + f"{out_file} -var thickness {out_file}" + ) + + # Cleanup 
metadata + _ = sanitize_gii_metadata(out_file, out_file, da_atr={"Intent": "NIFTI_INTENT_SHAPE"}) + + return out_file + + +def convert_freesurfer_annot( + sub_id: str, hemi: Literal["L", "R"], annot_id: str, fs_dir: PathLike, out_dir: PathLike +) -> Path: + """Converts FreeSurfer's annotation data to GIFTI format. + + Args: + sub_id: SubjectID. + hemi: Brain hemisphere. + annot_id: Surface annotation name in FreeSurfer's outputs. + fs_dir: Subject's FreeSurfer output directory. + out_dir: Directory to store output file. + + Returns: + A surface label file in GIFTI format. + + Raises: + ValueError: Unrecognized annot_id. + """ + + # Parse hemisphere + hemi, structure = parse_hemi(hemi) + + # Parse annotation name + if annot_id == "aparc": + atlas_id = "Aparc" + elif annot_id == "aparc.a2009s": + atlas_id = "Destrieux" + elif annot_id == "aparc.DKTatlas": + atlas_id = "DKT" + else: + raise ValueError(f"Unrecognized annotation: {annot_id} ...\n") + + # Convert annotation file + annot_file = Path(fs_dir).joinpath( + f"sub-{sub_id}", "label", f"{hemi.lower()}h.{annot_id}.annot" + ) + wm_file = Path(fs_dir).joinpath(f"sub-{sub_id}", "surf", f"{hemi.lower()}h.white") + out_file = Path(out_dir).joinpath( + f"sub-{sub_id}_hemi-{hemi}_space-fsnative_den-fsnative_desc-{atlas_id}_dseg.label.gii" + ) + print(f"Converting annotation: {annot_file} ...", flush=True) + run_cmd(f"mris_convert --annot {annot_file} {wm_file} {out_file}") + + # Set GIFTI metadata + run_cmd("wb_command -disable-provenance -set-structure " f"{out_file} {structure}") + run_cmd( + f"wb_command -disable-provenance -set-map-names {out_file} -map 1 " + f"sub-{sub_id}_hemi-{hemi}_desc-{atlas_id}" + ) + run_cmd( + f"wb_command -disable-provenance -gifti-label-add-prefix {out_file} {hemi}_ {out_file}" + ) + + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file) + + return out_file + + +def convert_freesurfer_volume( + sub_id: str, + volume_id: str, + fs_dir: PathLike, + out_dir: PathLike, + 
xfm_file: Optional[PathLike] = None, + ref_file: Optional[PathLike] = None, + lut_file: Optional[PathLike] = None, +) -> Path: + """Converts FreeSurfer's volume image to NIFTI format. + + Args: + sub_id: SubjectID. + volume_id: Volume image name in FreeSurfer's outputs. + fs_dir: Subject's FreeSurfer output directory. + out_dir: Directory to store output file. + xfm_file: An ITK format affine transformation matrix file. If it + is given, applying it to the volume file. Optional. + ref_file: Reference volume file for xfm_file. Optional. + lut_file: Lut file contains label information of FreeSurfer's + parcellations. It is used to import label information to + parcellation NIFTI image header. Optional. + + Returns: + A volume image file. + + Raises: + ValueError: ref_file is not specified when xfm_file is given. + """ + + # Conform name between FS and output + names = { + "T1": "T1w", + "aparc+aseg": "AparcAseg", + "aparc.a2009s+aseg": "DestrieuxAseg", + "aparc.DKTatlas+aseg": "DKTAseg", + "wmparc": "WMParc", + } + atlas_list = ["wmparc", "aparc.a2009s+aseg", "aparc+aseg", "aparc.DKTatlas+aseg"] + + # Convert volume from mgz to nifti + # regular volume + if not volume_id in atlas_list: + in_file = Path(fs_dir).joinpath(f"sub-{sub_id}", "mri", f"{volume_id}.mgz") + out_file = Path(out_dir).joinpath( + f"sub-{sub_id}_space-T1w_desc-FS_{names[volume_id]}.nii.gz" + ) + print(f"Converting {in_file} ...", flush=True) + run_cmd(f"mri_convert {in_file} {out_file}") + # parcellation + else: + in_file = Path(fs_dir).joinpath(f"sub-{sub_id}", "mri", f"{volume_id}.mgz") + out_file = Path(out_dir).joinpath( + f"sub-{sub_id}_space-T1w_desc-{names[volume_id]}_dseg.nii.gz" + ) + print(f"\nConverting {in_file} ...", flush=True) + run_cmd(f"mri_convert {in_file} {out_file}") + + # Apply affine transformation to align volume to reference + if xfm_file is not None: + if ref_file is None: + raise ValueError("If xfm_file is provided, ref_file is also required.") + print(f"\nApply affine 
transformation: {out_file} ...", flush=True) + cmd = f"antsApplyTransforms -i {out_file} -r {ref_file} -o {out_file} -t {xfm_file} " + # regular volume + if not volume_id in atlas_list: + cmd += "-u float -n LanczosWindowedSinc" + # parcellation + else: + cmd += "-u int -n MultiLabel" + run_cmd(cmd) + + # Import label information to NIFTI file + if (volume_id in atlas_list) and (lut_file is not None): + print(f"\nImporting label information: {out_file} ...", flush=True) + run_cmd( + "wb_command -disable-provenance -logging SEVERE -volume-label-import " + f"{out_file} {lut_file} {out_file} -drop-unused-labels" + ) + + return out_file + + +def get_cras(ref_file: PathLike, out_dir: PathLike) -> Path: + """Writes FreeSurfer's CRAS matrix to file. + + Args: + ref_file: A reference image file to get the CRAS matrix. + out_dir: Directory to store output file. + + Returns: + A text file contains CRAS matrix. + """ + + cras_file = Path(out_dir).joinpath(f"cras_xfm.mat") + # Get cras infomation + cras = run_cmd(f"mri_info --cras {ref_file}", print_output=False) + cras = cras.stdout.replace("\n", "").split(" ") + # Write out the cras infomation like an affine matrix + with open(cras_file, "w") as f: + f.write(f"1 0 0 {cras[0]}\n") + f.write(f"0 1 0 {cras[1]}\n") + f.write(f"0 0 1 {cras[2]}\n") + f.write(f"0 0 0 1\n") + + return cras_file + + +def apply_affine_transformation_to_surface( + surf_file: PathLike, itk_file: PathLike, out_dir: PathLike, debug: bool = False +) -> Path: + """Applies affine transformation to a surface in GIFTI format. + + Args: + surf_file: A surface file in GIFTI format. + itk_file: Affine transformation matrix (ITK format) file. + out_dir: Directory to store output file. + debug: If true, output intermediate files to out_dir. + + Returns: + A surface file in GIFTI format. + """ + + # Temporarily modify itk affine matrix header + # The command `wb_command -convert-affine` is used for converting + # itk format affine matrix to a NIFTI world affine. 
+ # In `wb_command`, the accepted itk affine class is + # `MatrixOffsetTransformBase_double_3_3`, which is a base class in + # itk specification. And this class should never be passed to a + # downstream software. + # See https://github.com/Washington-University/workbench/blob/f31a4edc490c3b8afa2ecca1e97390a53719fb33/src/Files/AffineFile.cxx#L150 + # for codes in `wb_command` source file. + # In order to mitigate this issue, here we modify the affine matrix + # created by fMRIprep manually to satisfy the `wb_command`. + # Since the class `AffineTransform_float_3_3` is a subclass of + # `MatrixOffsetTransformBase_double_3_3`, this modification should + # be pretty safe. + with tempfile.TemporaryDirectory() as tmp_dir: + + itk_mod_file = Path(tmp_dir).joinpath( + Path(Path(itk_file).name.replace("_xfm", "_desc-modified_xfm")) + ) + _ = _modify_itk_class(itk_file, itk_mod_file) + + # Convert transformation matrix from itk format to NIFTI 'world' affine + world_affine_file = Path(tmp_dir).joinpath( + Path(Path(itk_file).name.replace("_xfm", "_desc-world_xfm")) + ) + run_cmd( + "wb_command -convert-affine " f"-from-itk {itk_mod_file} -to-world {world_affine_file}" + ) + + # Apply affine transformation to surface + print(f"Applying affine transformation to {Path(surf_file).name} ...", flush=True) + run_cmd( + "wb_command -disable-provenance -surface-apply-affine " + f"{surf_file} {world_affine_file} {surf_file}" + ) + + # Output affine matrix if requested + if debug: + shutil.copytree(tmp_dir, Path(out_dir).joinpath("temp_affine"), dirs_exist_ok=True) + + return Path(surf_file) + + +def _modify_itk_class(in_file: PathLike, out_file: PathLike) -> Path: + """Modifies ITK affine matrix class information.""" + + with open(in_file, "r") as f: + data = f.read() + data = data.replace("AffineTransform_float_3_3", "MatrixOffsetTransformBase_double_3_3") + with open(out_file, "w") as f: + f.write(data) + return Path(out_file) diff --git a/pantheon/preprocess/registration.py 
b/pantheon/preprocess/registration.py new file mode 100644 index 0000000..07ebeac --- /dev/null +++ b/pantheon/preprocess/registration.py @@ -0,0 +1,214 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from __future__ import annotations +from typing import Optional, Literal +from pathlib import Path +import shutil +import tempfile + +from ..image.gifti import sanitize_gii_metadata +from ..utils.validation import parse_hemi +from ..utils.shell import run_cmd +from ..utils.typing import PathLike + + +def warp_native_sphere_to_fsLR( + in_sphere_file: PathLike, + proj_sphere_file: PathLike, + unproj_sphere_file: PathLike, + out_file: PathLike, +) -> Path: + """Deforms fsnative sphere in fsaverage space to fsLR space. + + This function warps sphere mesh in native space to fsLR space. The + sphere mesh density remains the same after warpping. It relies on + the sphere in fsaverage space generated by FreeSurfer. + + Args: + in_sphere_file: Native sphere mesh file in fsaverage space + (generated by FreeSurfer). + proj_sphere_file: Standard fsaverage sphere mesh file. + unproj_sphere_file: fsaverage sphere mesh file in fsLR space. + out_file: Output native sphere mesh file. + + Returns: + A sphere mesh file in fsLR space with native mesh density. + """ + + print(f"Warpping native sphere to fsLR space: {in_sphere_file} ...", flush=True) + run_cmd( + "wb_command -disable-provenance -surface-sphere-project-unproject " + f"{in_sphere_file} {proj_sphere_file} {unproj_sphere_file} {out_file}" + ) + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file, da_meta={"Name": ""}) + + return Path(out_file) + + +def calc_native_to_fsLR_registration_MSMSulc( + hemi: Literal["L", "R"], + in_sphere_file: PathLike, + fs_aligned_sphere_file: PathLike, + in_sulc_file: PathLike, + ref_sphere_file: PathLike, + ref_sulc_file: PathLike, + out_file: PathLike, + msm_config_file: PathLike, + debug: bool = False, +): + """Calculates registration between native and fsLR space. 
+ + This function aligns a sphere mesh file in native space to fsLR + space through MSMSulc method. The mesh density remains the same as + the native sphere mesh. Usually, the result of this method is + slightly different from (better than) the FreeSurfer's result. + The fs_aligned_sphere_file is used for initializing the MSMSulc + registration. + + Args: + hemi: Brain hemisphere. + in_sphere_file: Native sphere mesh file in native space. + fs_aligned_sphere_file: Native sphere mesh file in fsLR space. + Usually it is generated by function + 'warp_native_sphere_to_fsLR'. + in_sulc_file: Metric sulc file in native space. + ref_sphere_file: Standard fsLR sphere file. + ref_sulc_file: Standard metric sulc file in fsLR space. + out_file: Output native sphere mesh file. + msm_config_file: MSMSulc configuration file. + debug: If true, output intermediate files. + + Returns: + A sphere mesh file in fsLR space with native mesh density. + """ + + # Parse hemisphere + hemi, structure = parse_hemi(hemi) + + with tempfile.TemporaryDirectory() as tmp_dir: + + # Calculate affine transformation to align fsnative and fsLR sphere + print(f"Aligning fsnative and fsLR sphere (affine): {in_sphere_file} ... 
", flush=True) + affine_file = Path(tmp_dir, f"hemi-{hemi}_from-fsnative_to-fsLR_mode-image_xfm.mat") + run_cmd( + "wb_command -disable-provenance -surface-affine-regression " + f"{in_sphere_file} {fs_aligned_sphere_file} {affine_file}" + ) + # Apply affine transformation to input sphere + aligned_sphere_file = Path( + tmp_dir, f"hemi-{hemi}_space-fsnative_den-fsnative_desc-fsLRrot_sphere.surf.gii" + ) + run_cmd( + "wb_command -disable-provenance -surface-apply-affine " + f"{in_sphere_file} {affine_file} {aligned_sphere_file}" + ) + # Change sphere radius + run_cmd( + "wb_command -disable-provenance -logging SEVERE -surface-modify-sphere " + f"{aligned_sphere_file} 100 {aligned_sphere_file}" + ) + # Cleanup metadata + _ = sanitize_gii_metadata(aligned_sphere_file, aligned_sphere_file, da_meta={"Name": ""}) + + # Run MSMSulc registration + print(f"Running MSMSulc registration to fsLR space: {in_sphere_file} ...", flush=True) + run_cmd( + f"msm --conf={msm_config_file} --inmesh={aligned_sphere_file} " + f"--refmesh={ref_sphere_file} --indata={in_sulc_file} " + f"--refdata={ref_sulc_file} --out={tmp_dir}/{hemi}." + ) + # Copy outputs and set GIFTI metadata + shutil.copy(Path(tmp_dir).joinpath(f"{hemi}.sphere.reg.surf.gii"), out_file) + run_cmd(f"wb_command -disable-provenance -set-structure {out_file} {structure}") + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file, da_meta={"Name": ""}) + + # Output registration files if requested + if debug: + out_dir = Path(out_file).parent + shutil.copy(msm_config_file, tmp_dir) + shutil.copytree(tmp_dir, out_dir.joinpath("temp_MSMSulc"), dirs_exist_ok=True) + + return Path(out_file) + + +def calc_registration_distortion( + src_sphere_file: PathLike, + warpped_sphere_file: PathLike, + out_file: PathLike, + metric_id: Literal["Areal", "Edge", "StrainJ", "StrainR"], + gifti_map_name: Optional[str] = None, +) -> Path: + """Calculates distortion map of warpped surface sphere. 
+
+    Args:
+        src_sphere_file: The original surface sphere file.
+        warpped_sphere_file: Warpped surface sphere file.
+        out_file: Output distortion map file.
+        metric_id: Distortion metric.
+        gifti_map_name: Map name of distortion file.
+
+    Returns:
+        A registration distortion GIFTI file.
+
+    Raises:
+        ValueError: Unrecognized distortion metric_id.
+    """
+
+    # Parse distortion metric name
+    if metric_id not in ["Areal", "Edge", "StrainJ", "StrainR"]:
+        raise ValueError(
+            f"Unrecognized distortion metric {metric_id}. Valid: Areal, Edge, StrainJ, StrainR"
+        )
+
+    print(f"Calculating registration {metric_id} distortion: {src_sphere_file} ...", flush=True)
+    # Areal
+    if metric_id == "Areal":
+        run_cmd(
+            "wb_command -disable-provenance -surface-distortion "
+            f"{src_sphere_file} {warpped_sphere_file} {out_file}"
+        )
+    # Edge
+    if metric_id == "Edge":
+        run_cmd(
+            "wb_command -disable-provenance -surface-distortion "
+            f"{src_sphere_file} {warpped_sphere_file} {out_file} -edge-method"
+        )
+    # StrainJ, StrainR
+    if metric_id in ["StrainJ", "StrainR"]:
+        # map column in the strain file
+        col_id = 1 if metric_id == "StrainJ" else 2
+        with tempfile.TemporaryDirectory() as tmp_dir:
+            strain_file = Path(tmp_dir).joinpath("Strain_distortion.shape.gii")
+            run_cmd(
+                "wb_command -disable-provenance -surface-distortion "
+                f"{src_sphere_file} {warpped_sphere_file} {strain_file} -local-affine-method"
+            )
+            run_cmd(
+                "wb_command -disable-provenance -metric-merge "
+                f"{out_file} -metric {strain_file} -column {col_id}"
+            )
+            run_cmd(
+                "wb_command -disable-provenance -metric-math ln(var)/ln(2) "
+                f"{out_file} -var var {out_file}"
+            )
+
+    # Set GIFTI metadata
+    palette_mode = "MODE_USER_SCALE"
+    palette_options = (
+        "-pos-user 0 1 -neg-user 0 -1 -interpolate true -palette-name ROY-BIG-BL "
+        "-disp-pos true -disp-neg true -disp-zero false"
+    )
+    run_cmd(
+        "wb_command -disable-provenance -metric-palette "
+        f"{out_file} {palette_mode} {palette_options}"
+    )
+    gifti_map_name = f"distortion_{metric_id}" if gifti_map_name is None else gifti_map_name
+    run_cmd(f"wb_command -disable-provenance -set-map-names {out_file} -map 1 {gifti_map_name}")
+
+    # Cleanup metadata
+    _ = sanitize_gii_metadata(out_file, out_file)
+
+    return Path(out_file)
diff --git a/pantheon/preprocess/surface.py b/pantheon/preprocess/surface.py
new file mode 100644
index 0000000..6b77962
--- /dev/null
+++ b/pantheon/preprocess/surface.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+from typing import Optional, Union, Literal
+from pathlib import Path
+import tempfile
+
+from .bold import convert_fwhm_to_sigma
+from ..image.gifti import sanitize_gii_metadata
+from ..utils.validation import parse_hemi
+from ..utils.shell import run_cmd
+from ..utils.typing import PathLike
+
+
+def make_midthickness_surface(
+    hemi: Literal["L", "R"], wm_file: PathLike, pial_file: PathLike, out_file: PathLike
+) -> Path:
+    """Creates midthickness surface.
+
+    Args:
+        hemi: Brain hemisphere.
+        wm_file: White surface file.
+        pial_file: Pial surface file.
+        out_file: Output midthickness surface file.
+
+    Returns:
+        Midthickness surface file.
+ """ + + # Parse hemisphere + hemi, structure = parse_hemi(hemi) + # Create midthickness by averaging white and pial surface + print(f"Creating midthickness surface: {Path(out_file).name} ...", flush=True) + run_cmd( + f"wb_command -disable-provenance -surface-average {out_file} " + f"-surf {wm_file} -surf {pial_file}" + ) + # Set GIFTI metadata + run_cmd( + f"wb_command -disable-provenance -set-structure {out_file} " + f"{structure} -surface-type ANATOMICAL -surface-secondary-type MIDTHICKNESS" + ) + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file, da_meta={"Name": ""}) + return Path(out_file) + + +def make_inflated_surface( + midthickness_file: PathLike, + inflated_out_file: PathLike, + very_inflated_file: PathLike, + inflate_extra_scale: Union[float, int] = 1.0, +) -> list[Path]: + """Creates inflated and very inflated surfaces. + + Args: + midthickness_file: Midthickness surface file. + inflated_out_file: Output inflated surface file. + very_inflated_file: Output veryinflated surface file. + inflate_extra_scale: Extra iteration scaling value. This value + is used in function calc_inflation_scale to calculate the + final iteration scaling value. + + Returns: + A tuple (inflated, veryinflated), where inflated is the inflated + surface file and veryinflated is the veryinflated surface file. 
+ """ + + inflation_scale = calc_inflation_scale( + midthickness_file, inflate_extra_scale=inflate_extra_scale + ) + print(f"Creating inflated surfaces: {inflated_out_file} ...", flush=True) + print(f"Inflation scale: {inflation_scale}", flush=True) + run_cmd( + "wb_command -disable-provenance -surface-generate-inflated " + f"{midthickness_file} {inflated_out_file} {very_inflated_file} " + f"-iterations-scale {inflation_scale}" + ) + # Cleanup metadata + _ = sanitize_gii_metadata(inflated_out_file, inflated_out_file, da_meta={"Name": ""}) + _ = sanitize_gii_metadata(very_inflated_file, very_inflated_file, da_meta={"Name": ""}) + return Path(inflated_out_file), Path(very_inflated_file) + + +def make_nomedialwall_roi( + thickness_file: PathLike, + midthickness_file: PathLike, + out_file: PathLike, + gifti_map_name: str = "nomedialwall", +) -> Path: + """Makes (no)medialwall ROI by thresholding the surface thickness. + + Args: + thickness_file: Surface thickness metric file. + midthickness_file: Midthickness surface file. + out_file: Output nomedialwall mask file. + gifti_map_name: GIFTI map name of the out_file. + + Returns: + Nomedialwall mask file. 
+    """
+
+    # (No)medialwall region defined as vertices with abs(thickness) > 0
+    print(
+        f"Creating (no)medialwall ROI from surface thickness: {out_file} ...",
+        flush=True,
+    )
+    run_cmd(
+        f"wb_command -disable-provenance -metric-math 'thickness > 0' {out_file} "
+        f"-var thickness {thickness_file}"
+    )
+    run_cmd(
+        f"wb_command -disable-provenance -metric-fill-holes {midthickness_file} "
+        f"{out_file} {out_file}"
+    )
+    run_cmd(
+        f"wb_command -disable-provenance -metric-remove-islands {midthickness_file} "
+        f"{out_file} {out_file}"
+    )
+    # Set GIFTI metadata
+    run_cmd(f"wb_command -disable-provenance -set-map-names {out_file} -map 1 {gifti_map_name}")
+    # Cleanup metadata
+    _ = sanitize_gii_metadata(out_file, out_file)
+    return Path(out_file)
+
+
+def refine_nomedialwall_roi(
+    roi_file: PathLike,
+    wrapped_sphere_file: PathLike,
+    template_roi_file: PathLike,
+    template_sphere_file: PathLike,
+    out_file: PathLike,
+) -> Path:
+    """Refines native (no)medialwall ROI using template ROI.
+
+    Args:
+        roi_file: (no)medialwall ROI in native space with native mesh.
+        wrapped_sphere_file: Surface sphere file warpped in template
+            space with native mesh.
+        template_roi_file: Template (no)medialwall ROI file.
+        template_sphere_file: Template surface sphere file.
+        out_file: Output (no)medialwall ROI file.
+
+    Returns:
+        Refined (no)medialwall ROI file.
+ """ + + print(f"Refining (no)medialwall ROI: {roi_file} ...", flush=True) + with tempfile.TemporaryDirectory() as tmp_dir: + # Resample template ROI to native space + resampled_roi_file = Path(tmp_dir).joinpath( + "ResampledTemplate_desc-nomedialwall_probseg.shape.gii" + ) + run_cmd( + f"wb_command -disable-provenance -metric-resample {template_roi_file} " + f"{template_sphere_file} {wrapped_sphere_file} BARYCENTRIC " + f"{resampled_roi_file} -largest" + ) + # Combine native and template ROI (mostly add regions near the hippocampus) + run_cmd( + "wb_command -disable-provenance -metric-math '(native + template) > 0' " + f"{out_file} -var native {roi_file} -var template {resampled_roi_file}" + ) + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file) + return Path(out_file) + + +def mask_metric_nomedialwall( + metric_file: PathLike, + roi_file: PathLike, + midthickness_file: PathLike, + out_file: PathLike, +) -> Path: + """Applies (no)medialwall mask to surface metric. + + Args: + metric_file: Surface metric file. + roi_file: Nomedialwall ROI file. + midthickness_file: Midthickness surface file. + out_file: Output surface metric file. + + Returns: + Masked surface metric file. + """ + + print(f"Masking metric file: {metric_file} ...", flush=True) + # Dilate metric by 10mm + run_cmd( + f"wb_command -disable-provenance -metric-dilate {metric_file} " + f"{midthickness_file} 10 {out_file} -nearest" + ) + # Apply (no)medialwall ROI + run_cmd(f"wb_command -disable-provenance -metric-mask {out_file} {roi_file} {out_file}") + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file) + return Path(out_file) + + +def smooth_metric( + metric_file: PathLike, + midthickness_file: PathLike, + out_file: PathLike, + smoothing_fwhm: float, + roi_file: Optional[PathLike] = None, +) -> Path: + """Smooths surface metric. + + Args: + metric_file: Surface metric file. + midthickness_file: Midthickness surface file. + out_file: Output surface metric file. 
+ smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). + roi_file: Surface mask file. Optional. If it is given, smoothing + is constrained within that mask. + + Returns: + Smoothed surface metric file. + """ + + # Parse smoothing fwhm + sigma = convert_fwhm_to_sigma(smoothing_fwhm) + # Smoothing + cmd = ( + "wb_command -disable-provenance -metric-smoothing " + f"{midthickness_file} {metric_file} {sigma} {out_file} " + ) + if roi_file is not None: + cmd += f"-roi {roi_file}" + run_cmd(cmd) + # Cleanup metadata + _ = sanitize_gii_metadata(out_file, out_file) + return Path(out_file) + + +def calc_inflation_scale(in_file: PathLike, inflate_extra_scale: Union[float, int] = 1.0) -> float: + """Calculates surface inflation scale factor. + + Args: + in_file: Surface file used for creating inflated surfaces. + inflate_extra_scale: Extra scaling value multiply to the value + calculated from the number of vertices. + Returns: + Inflation scaling value used in program + 'wb_command -surface-generate-inflated' + """ + + # This formula is from HCPpipeline + # https://github.com/Washington-University/HCPpipelines/blob/1334b35ab863540044333bbdec70a68fb19ab611/PostFreeSurfer/scripts/FreeSurfer2CaretConvertAndRegisterNonlinear.sh#L337 + # Find vertex number + info = run_cmd(f"wb_command -file-information {in_file}", print_output=False).stdout + for i in info.split("\n"): + if "Number of Vertices" in i: + num_vertex = int(i.split(":")[1]) + # Calulate final scale + inflation_scale = inflate_extra_scale * 0.75 * num_vertex / 32492 + return inflation_scale diff --git a/pantheon/preprocess/template.py b/pantheon/preprocess/template.py new file mode 100644 index 0000000..aa00301 --- /dev/null +++ b/pantheon/preprocess/template.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from __future__ import annotations +from typing import Optional, Union +from pathlib import Path +import shutil + +from ..image.gifti import sanitize_gii_metadata +from ..utils.typing 
import PathLike + + +def copy_template_file( + out_dir: PathLike, + src_dir: Optional[PathLike] = None, + check_output_only: bool = False, +) -> dict[str, Union[list[Path], dict[str, list[Path]]]]: + """Copies standard files from HCP offical release. + + Template folder should be the custom `tpl-HCP_S1200`. + + Args: + out_dir: Ouput directory to store template files. + src_dir: Template file directory. If check_ouput_only is true, + this could be None. + check_output_only: If true, only check whether required files + are presented in the out_dir, instead of copying them from + src_dir. + + Returns: + A dict contains template files path. + + Raises: + FileNotFoundError: Template file is not found in out_dir. + """ + + if check_output_only: + print("Checking standard files from HCP offical release...", flush=True) + else: + print("Copying standard files from HCP offical release...", flush=True) + + # Directory + out_dir = Path(out_dir) + out_dir.mkdir(exist_ok=True, parents=True) + if check_output_only: + src_dir = "" + mesh_dir = Path(src_dir).joinpath("standard_mesh_atlases") + atlas_dir = Path(src_dir).joinpath("S1200_Group_Avg_32k") + lut_dir = Path(src_dir).joinpath("Lut") + config_dir = Path(src_dir).joinpath("Config") + + # Output file record + file_dict = {"gifti": {"L": [], "R": []}, "cifti": [], "volume": [], "other": []} + + # Surface file + copy_list = [] + for hemi in ["L", "R"]: + # fsLR standard sphere surface (164k, 59k, 32k) + src_file = mesh_dir.joinpath(f"fsaverage.{hemi}_LR.spherical_std.164k_fs_LR.surf.gii") + dst_file = out_dir.joinpath(f"fsLR_hemi-{hemi}_space-fsLR_den-164k_sphere.surf.gii") + copy_list.append((src_file, dst_file)) + file_dict["gifti"][hemi].append(dst_file) + for mesh_den in ["59k", "32k"]: + src_file = mesh_dir.joinpath(f"{hemi}.sphere.{mesh_den}_fs_LR.surf.gii") + dst_file = out_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_sphere.surf.gii" + ) + copy_list.append((src_file, dst_file)) + 
file_dict["gifti"][hemi].append(dst_file) + # fsLR standard surface (only 32k) + for surf_id in ["wm", "pial", "midthickness", "inflated", "veryinflated", "flat"]: + src_file = atlas_dir.joinpath( + f"S1200_hemi-{hemi}_space-fsLR_den-32k_desc-MSMAll_{surf_id}.surf.gii" + ) + dst_file = out_dir.joinpath(f"fsLR_hemi-{hemi}_space-fsLR_den-32k_{surf_id}.surf.gii") + copy_list.append((src_file, dst_file)) + file_dict["gifti"][hemi].append(dst_file) + # fsaverage sphere surface (164k) in fsLR space (164k) (for registration) + src_file = mesh_dir.joinpath( + f"fs_{hemi}", + f"fs_{hemi}-to-fs_LR_fsaverage.{hemi}_LR.spherical_std.164k_fs_{hemi}.surf.gii", + ) + dst_file = out_dir.joinpath(f"fsaverage_hemi-{hemi}_space-fsLR_den-164k_sphere.surf.gii") + copy_list.append((src_file, dst_file)) + file_dict["gifti"][hemi].append(dst_file) + # fsaverage standard sphere surface (164k) (for registration) + src_file = mesh_dir.joinpath( + f"fs_{hemi}", f"fsaverage.{hemi}.sphere.164k_fs_{hemi}.surf.gii" + ) + dst_file = out_dir.joinpath( + f"fsaverage_hemi-{hemi}_space-fsaverage_den-164k_sphere.surf.gii" + ) + copy_list.append((src_file, dst_file)) + file_dict["gifti"][hemi].append(dst_file) + for src_file, dst_file in copy_list: + if check_output_only: + assert dst_file.is_file(), f"File {dst_file} not found." + else: + shutil.copy(src_file, dst_file) + _ = sanitize_gii_metadata( + dst_file, dst_file, da_meta={"AnatomicalStructureSecondary": "MidThickness"} + ) + + # Surface metric file + copy_list = [] + for hemi in ["L", "R"]: + # fsLR sulc metric (164k, or MSM registration) + src_file = mesh_dir.joinpath(f"{hemi}.refsulc.164k_fs_LR.shape.gii") + dst_file = out_dir.joinpath(f"fsLR_hemi-{hemi}_space-fsLR_den-164k_sulc.shape.gii") + copy_list.append((src_file, dst_file)) + file_dict["gifti"][hemi].append(dst_file) + for src_file, dst_file in copy_list: + if check_output_only: + assert dst_file.is_file(), f"File {dst_file} not found." 
+ else: + shutil.copy(src_file, dst_file) + _ = sanitize_gii_metadata(dst_file, dst_file) + + # Surface ROI file + copy_list = [] + for hemi in ["L", "R"]: + # (no)medialwall ROI (164k, 59k, 32k) + for mesh_den in ["164k", "59k", "32k"]: + src_file = mesh_dir.joinpath(f"{hemi}.atlasroi.{mesh_den}_fs_LR.shape.gii") + dst_file = out_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_desc-nomedialwall_probseg.shape.gii" + ) + copy_list.append((src_file, dst_file)) + file_dict["gifti"][hemi].append(dst_file) + for src_file, dst_file in copy_list: + if check_output_only: + assert dst_file.is_file(), f"File {dst_file} not found." + else: + shutil.copy(src_file, dst_file) + _ = sanitize_gii_metadata( + dst_file, dst_file, da_meta={"Name": f"fsLR_hemi-{hemi}_desc-nomedialwall"} + ) + + # CIFTI, volume, lut and MSMSulc config file + copy_list = [] + # Atlas file (32k) + # MMP1, Brodmann and RSN + for label_id in ["MMP1", "Brodmann", "RSN"]: + src_file = atlas_dir.joinpath(f"S1200_space-fsLR_den-32k_desc-{label_id}_dseg.dlabel.nii") + dst_file = out_dir.joinpath(src_file.name.replace("S1200", "fsLR")) + copy_list.append((src_file, dst_file)) + file_dict["cifti"].append(dst_file) + # Schaefer2018 + src_file = atlas_dir.joinpath( + f"Schaefer2018_space-fsLR_den-32k_desc-400Parcels17Networks_dseg.dlabel.nii" + ) + dst_file = out_dir.joinpath(src_file.name.replace("Schaefer2018", "fsLR")) + copy_list.append((src_file, dst_file)) + file_dict["cifti"].append(dst_file) + # Subcortical volume ROI file in MNI space + src_file = mesh_dir.joinpath("Atlas_ROIs.2.nii.gz") + dst_file = out_dir.joinpath("MNI_space-MNI152NLin6Asym_res-2_desc-Subcortical_dseg.nii.gz") + copy_list.append((src_file, dst_file)) + file_dict["volume"].append(dst_file) + # Lut + for fname in ["FreeSurferAllLut.txt", "FreeSurferSubcorticalLabelTableLut.txt"]: + src_file = lut_dir.joinpath(fname) + dst_file = out_dir.joinpath(fname) + copy_list.append((src_file, dst_file)) + 
file_dict["other"].append(dst_file) + # MSMSulc config + src_file = config_dir.joinpath("MSMSulcStrainFinalconf") + dst_file = out_dir.joinpath("MSMSulcStrainFinalconf") + copy_list.append((src_file, dst_file)) + file_dict["other"].append(dst_file) + for src_file, dst_file in copy_list: + if check_output_only: + if not dst_file.is_file(): + raise FileNotFoundError(f"File {dst_file} is not found.") + else: + shutil.copy(src_file, dst_file) + + return file_dict diff --git a/pantheon/preprocess/volume.py b/pantheon/preprocess/volume.py new file mode 100644 index 0000000..0f67c44 --- /dev/null +++ b/pantheon/preprocess/volume.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: +# + +from __future__ import annotations +from typing import Optional +from pathlib import Path +import shutil +import tempfile + +from ..utils.shell import run_cmd +from ..utils.typing import PathLike + + +def make_brainmask_from_atlas(atlas_file: PathLike, out_file: PathLike) -> Path: + """Creates brainmask by refining atlas volume. + + Args: + atlas_file: Volume brain atlas file. + out_file: Output brain mask file. + + Returns: + A brain mask file. + """ + + print(f"Creating brainmask from {atlas_file} ...", flush=True) + run_cmd(f"fslmaths {atlas_file} -bin -dilD -dilD -dilD -ero -ero {out_file}") + run_cmd(f"wb_command -disable-provenance -volume-fill-holes {out_file} {out_file}") + run_cmd(f"fslmaths {out_file} -bin {out_file}") + return out_file + + +def warp_atlas_to_reference( + atlas_file: PathLike, + out_file: PathLike, + xfm_file: PathLike, + ref_file: PathLike, + lut_file: Optional[PathLike], +) -> Path: + """Warps atlas file to target space. + + Args: + atlas_file: Volume brain atlas file. + out_file: Output brain atlas file. + xfm_file: Spatial transformation matrix file. It should be a + antsApplyTransforms compatible file. + ref_file: Reference volume file for spatial transformation. 
+ lut_file: Lut file contains label information of the atlas_file. + It is used to import label information to atlas NIFTI image + header. Optional. + + Returns: + A warpped atlas file. + """ + + # Warp atlas to MNI space + print(f"Warpping {atlas_file} to {ref_file} ...", flush=True) + run_cmd( + f"antsApplyTransforms -i {atlas_file} -r {ref_file} -o {out_file} -t {xfm_file} " + "-u int -n MultiLabel" + ) + # Import label information to NIFTI file + if lut_file is not None: + run_cmd( + f"wb_command -disable-provenance -logging SEVERE -volume-label-import " + f"{out_file} {lut_file} {out_file} -discard-others" + ) + return out_file + + +def make_cortical_ribbon( + ref_file: PathLike, + out_file: PathLike, + left_wm_file: Optional[PathLike] = None, + left_pial_file: Optional[PathLike] = None, + right_wm_file: Optional[PathLike] = None, + right_pial_file: Optional[PathLike] = None, + grey_ribbon_value: int = 1, + debug: bool = False, +) -> Path: + """Makes cortical ribbon volume from white and pial surface. + + Args: + ref_file: Volume image file used as reference of generated + cortical ribbon file. + out_file: Output cortical ribbon file. + left_wm_file: Left white surface file. + left_pial_file: Left pial surface file. + right_wm_file: Right white surface file. + right_pial_file: Right pial surface file. + grey_ribbon_value: Index value of the ribbon voxels. + debug: If true, output intermediate files. + + Returns: + A cortical ribbon mask file. + + Raises: + ValueError: No valid wm, pial surface combination is given. + """ + + # Parse input surface file + surf = {} + if (left_wm_file is not None) and (left_pial_file is not None): + surf["L"] = {"wm": left_wm_file, "pial": left_pial_file} + if (right_wm_file is not None) and (right_pial_file is not None): + surf["R"] = {"wm": right_wm_file, "pial": right_pial_file} + if len(surf.keys()) == 0: + raise ValueError( + "No valid surface combination (wm, pial) is found. Require one hemisphere at least." 
+ ) + + ribbon = [] + with tempfile.TemporaryDirectory() as tmp_dir: + + for hemi in surf.keys(): + # Calculate distance between white and pial surface + wm_dist_file = Path(tmp_dir).joinpath(f"hemi-{hemi}_desc-distance_wm.nii.gz") + pial_dist_file = Path(tmp_dir).joinpath(f"hemi-{hemi}_desc-distance_pial.nii.gz") + run_cmd( + f"wb_command -disable-provenance -create-signed-distance-volume " + f"{surf[hemi]['wm']} {ref_file} {wm_dist_file}" + ) + run_cmd( + f"wb_command -disable-provenance -create-signed-distance-volume " + f"{surf[hemi]['pial']} {ref_file} {pial_dist_file}" + ) + + # Thresholding distance file + wm_thr0_file = Path(tmp_dir).joinpath(f"hemi-{hemi}_desc-distance-thr0_wm.nii.gz") + pial_thr0_file = Path(tmp_dir).joinpath(f"hemi-{hemi}_desc-distance-thr0_pial.nii.gz") + run_cmd(f"fslmaths {wm_dist_file} -thr 0 -bin -mul 255 {wm_thr0_file}") + run_cmd(f"fslmaths {wm_thr0_file} -bin {wm_thr0_file}") + run_cmd(f"fslmaths {pial_dist_file} -uthr 0 -abs -bin -mul 255 {pial_thr0_file}") + run_cmd(f"fslmaths {pial_thr0_file} -bin {pial_thr0_file}") + + # Make ribbon volume + ribbon_file = Path(tmp_dir).joinpath(f"hemi-{hemi}_ribbon.nii.gz") + run_cmd(f"fslmaths {pial_thr0_file} -mas {wm_thr0_file} -mul 255 {ribbon_file}") + run_cmd(f"fslmaths {ribbon_file} -bin -mul {grey_ribbon_value} {ribbon_file} -odt int") + ribbon.append(ribbon_file) + + # Combine ribbon from left and right hemispheres + if len(ribbon) == 2: + run_cmd(f"fslmaths {ribbon[0]} -add {ribbon[1]} {out_file}") + else: + shutil.copy(ribbon[0], out_file) + + # Output intermediate files if requested + if debug: + out_dir = Path(out_file).parent + suffix = Path(ref_file).stem.split(".")[0] + shutil.copytree( + tmp_dir, + Path(out_dir).joinpath(f"temp_cortical_ribbon_{suffix}"), + dirs_exist_ok=True, + ) + + return out_file diff --git a/pantheon/preprocess/workflow.py b/pantheon/preprocess/workflow.py new file mode 100644 index 0000000..ee98285 --- /dev/null +++ b/pantheon/preprocess/workflow.py @@ 
-0,0 +1,3723 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Workflows to preprocess anatomical and functional data.""" + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: +# - These preprocess workflows are intended to use after fMRIPrep +# - It generates HCP-like output files as well spec file for Workbench +# - Functional data is resampled to fsLR space with 32k mesh density and +# MNI152NLin6Asym space with 2mm resolution for subcortical regions. +# This is the canonical functional space in HCP 3T data. + + +from __future__ import annotations +from typing import Optional, Union, Literal + +from pathlib import Path +import shutil +import re +import tempfile + +from .template import copy_template_file +from .freesurfer import ( + convert_freesurfer_geometry_surface, + convert_freesurfer_metric, + convert_freesurfer_annot, + convert_freesurfer_volume, +) +from .surface import ( + make_midthickness_surface, + make_inflated_surface, + make_nomedialwall_roi, + refine_nomedialwall_roi, + mask_metric_nomedialwall, + smooth_metric, +) +from .registration import ( + warp_native_sphere_to_fsLR, + calc_native_to_fsLR_registration_MSMSulc, + calc_registration_distortion, +) +from .volume import make_brainmask_from_atlas, warp_atlas_to_reference, make_cortical_ribbon +from .bold import ( + find_good_voxel, + sample_volume_to_surface, + extract_func_subcortical, + convert_fwhm_to_str, + make_func_map_name, +) +from ..image.gifti import resample_surface, resample_metric, resample_label +from ..image.cifti import make_dense_scalar, make_dense_label, make_dense_timeseries +from ..utils.validation import ( + conform_sub_id, + parse_hemi, + parse_space, + parse_mesh_density, + parse_registration_method, + parse_smoothing_fwhm, +) +from ..utils.shell import run_cmd +from ..utils.typing import PathLike + + +class NativeSurface: + """Native surfaces processing pipeline.""" + + def __init__( + self, + sub_id: Union[int, str], + fs_dir: PathLike, + 
template_dir: PathLike, + out_dir: PathLike, + ses_id: Optional[str] = None, + run_id: Optional[str] = None, + ) -> None: + """Initializes class. + + Args: + sub_id: SubjectID. + fs_dir: Subject's FreeSurfer output directory. + template_dir: Directory contains required template files. + out_dir: Directory to store output file. + ses_id: SessionID. Used in the filename prefix. For example, + sub-001_ses-01. + run_id: RunID. Used in the filename prefix. For example, + sub-001_run-1. + """ + + ############# + # Directories + ############# + self.sub_id = conform_sub_id(sub_id, with_prefix=False) + self.fs_dir = Path(fs_dir) + self.template_dir = Path(template_dir) + self.out_dir = Path(out_dir) + self.out_dir.mkdir(exist_ok=True, parents=True) + # Output filename prefix + self.anat_prefix = f"sub-{self.sub_id}" + if ses_id: + self.anat_prefix += f"_ses-{ses_id}" + if run_id: + self.anat_prefix += f"_run-{run_id}" + # Store important result files + self.native = {"L": {}, "R": {}} + + def run_native_space_pipeline( + self, + hemi: Literal["L", "R"], + xfm_file: Optional[PathLike] = None, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + msm_config_file: Optional[PathLike] = None, + inflate_extra_scale: Union[float, int] = 1.0, + debug: bool = False, + ) -> dict[str, Path]: + """Runs native surface pipeline. + + Args: + hemi: Brain hemisphere. + xfm_file: An ITK format affine transformation matrix file. + If it is given, applying it to the native surface files. + Optional. + registration_method: Surface-based registration method. If + FS, use FreeSurfer's fsaverage registration to warp + native surfaces to fsLR space. If MSMSulc, calculate + native to fsLR from scratch using MSM program. + msm_config_file: MSMSulc configuration file. Only required + when registration_method is MSMSulc. + inflate_extra_scale: Extra iteration scaling value. This + value is used in function calc_inflation_scale to + calculate the final iteration scaling value. 
+ debug: If true, output intermediate files. + + Returns: + A dict stores generated files. + """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + + res = {} + # Convert FreeSurfer's native surfaces + out = self.convert_native_surface(hemi, xfm_file=xfm_file, debug=debug) + res.update(out) + # Convert FreeSurfer's metric data + out = self.convert_native_metric(hemi) + res.update(out) + # Convert FreeSurfer's annotation data + out = self.convert_native_annotation(hemi) + res.update(out) + # Make midthinkness and inflated surfaces + out = self.make_aux_native_surface(hemi, inflate_extra_scale=inflate_extra_scale) + res.update(out) + # Make (no)medialwall ROI by thresholding the surface thickness metric + out = self.make_nomedialwall_roi(hemi) + res.update(out) + # Calculate registraion between native and fsLR space + out = self.calc_native_to_fsLR_registration( + hemi, + registration_method=registration_method, + msm_config_file=msm_config_file, + debug=debug, + ) + res.update(out) + # Calculate registration distortion + out = self.calc_registration_distortion(hemi, registration_method=registration_method) + res.update(out) + if registration_method != "FS": + out = self.calc_registration_distortion(hemi, registration_method="FS") + res.update(out) + # Refine (no)medialwall ROI using template ROI + out = self.refine_nomedialwall_roi(hemi, registration_method=registration_method) + res.update(out) + # Apply (no)medialwall ROI mask to metric data (curv, thickness) + out = self.mask_metric_nomedialwall(hemi) + res.update(out) + + return res + + def check_template_data(self): + """Checks common template files in template_dir.""" + + print("\n###Check template file###\n", flush=True) + copy_template_file(self.template_dir, check_output_only=True) + + def convert_native_surface( + self, hemi: Literal["L", "R"], xfm_file: Optional[PathLike] = None, debug: bool = False + ) -> dict[str, Path]: + """Converts FreeSurfer's native surfaces to GIFTI format. 
+ + Args: + hemi: Brain hemisphere. + xfm_file: An ITK format affine transformation matrix file. + If it is given, applying it to the native surface files. + Optional. + debug: If true, output intermediate files. + + Returns: + A dict stores generated files. + """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi, structure_format="gifti") + + print( + f"\n###Convert surfaces from FreeSurfer's reconstruction (hemi-{hemi})###\n", + flush=True, + ) + res = {} + # white and pial surfaces + for surf_id in ["white", "pial"]: + out_file = convert_freesurfer_geometry_surface( + self.sub_id, + hemi, + surf_id, + self.fs_dir, + self.out_dir, + adjust_cras=True, + xfm_file=xfm_file, + debug=debug, + ) + # rename output file + out_file = out_file.rename( + Path(self.out_dir, out_file.name.replace(f"sub-{self.sub_id}", self.anat_prefix)) + ) + res[f"hemi-{hemi}_{surf_id}"] = out_file + self.native[hemi][f"hemi-{hemi}_{surf_id}"] = out_file + # sphere and sphere.reg + for surf_id in ["sphere", "sphere.reg"]: + out_file = convert_freesurfer_geometry_surface( + self.sub_id, + hemi, + surf_id, + self.fs_dir, + self.out_dir, + adjust_cras=False, + xfm_file=None, + debug=debug, + ) + # rename output file + out_file = out_file.rename( + Path(self.out_dir, out_file.name.replace(f"sub-{self.sub_id}", self.anat_prefix)) + ) + res[f"hemi-{hemi}_{surf_id}"] = out_file + self.native[hemi][f"hemi-{hemi}_{surf_id}"] = out_file + + return res + + def convert_native_metric(self, hemi: Literal["L", "R"]) -> dict[str, Path]: + """Converts FreeSurfer's surface metric to GIFTI format. + + Args: + hemi: Brain hemisphere. + + Returns: + A dict stores generated files. 
+ """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + + # Metric data: Sulc, curvature, cortical thickness + print(f"\n###Convert FreeSurfer's surface metric (hemi-{hemi})###\n", flush=True) + res = {} + for metric_id in ["sulc", "curv", "thickness"]: + out_file = convert_freesurfer_metric( + self.sub_id, hemi, metric_id, self.fs_dir, self.out_dir + ) + # rename output file + out_file = out_file.rename( + Path(self.out_dir, out_file.name.replace(f"sub-{self.sub_id}", self.anat_prefix)) + ) + res[f"hemi-{hemi}_{metric_id}"] = out_file + self.native[hemi][f"hemi-{hemi}_{metric_id}"] = out_file + + return res + + def convert_native_annotation(self, hemi: Literal["L", "R"]) -> dict[str, Path]: + """Converts FreeSurfer's annotation data to GIFTI format. + + Args: + hemi: Brain hemisphere. + + Returns: + A dict stores generated files. + """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + + # Annotation: Aparc, Destrieux 2009, DKT + print(f"\n###Convert FreeSurfer's annotation (hemi-{hemi})###\n", flush=True) + res = {} + for annot_id in ["aparc", "aparc.a2009s", "aparc.DKTatlas"]: + out_file = convert_freesurfer_annot( + self.sub_id, hemi, annot_id, self.fs_dir, self.out_dir + ) + # rename output file + out_file = out_file.rename( + Path(self.out_dir, out_file.name.replace(f"sub-{self.sub_id}", self.anat_prefix)) + ) + res[f"hemi-{hemi}_{annot_id}"] = out_file + self.native[hemi][f"hemi-{hemi}_{annot_id}"] = out_file + + return res + + def make_aux_native_surface( + self, hemi: Literal["L", "R"], inflate_extra_scale: Union[float, int] = 1.0 + ) -> dict[str, Path]: + """Makes midthinkness and inflated surfaces in native space. + + Args: + hemi: Brain hemisphere. + inflate_extra_scale: Extra iteration scaling value. This + value is used in function calc_inflation_scale to + calculate the final iteration scaling value. + + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. 
+ """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + + # Required files + wm_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_wm.surf.gii" + ) + pial_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_pial.surf.gii" + ) + for f in [wm_file, pial_file]: + if not f.is_file(): + raise FileNotFoundError( + f"Surface {f} not found. Run function 'prepare_native_surface' first." + ) + + # Output + midthickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + inflated_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_inflated.surf.gii" + ) + veryinflated_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_veryinflated.surf.gii" + ) + + res = {} + # Midthickness surface + print( + f"\n###Create midthickness surface from white and pial surfaces (hemi-{hemi})###\n", + flush=True, + ) + out_file = make_midthickness_surface(hemi, wm_file, pial_file, midthickness_file) + res[f"hemi-{hemi}_midthickness"] = out_file + self.native[hemi][f"hemi-{hemi}_midthickness"] = out_file + + # Inflated and very inflated surfaces + print( + f"\n###Create inflated surfaces from midthickness surface (hemi-{hemi})###\n", + flush=True, + ) + out_file = make_inflated_surface( + midthickness_file, + inflated_file, + veryinflated_file, + inflate_extra_scale=inflate_extra_scale, + ) + + # Record result + res[f"hemi-{hemi}_inflated"] = out_file[0] + self.native[hemi][f"hemi-{hemi}_inflated"] = out_file[0] + res[f"hemi-{hemi}_veryinflated"] = out_file[1] + self.native[hemi][f"hemi-{hemi}_veryinflated"] = out_file[1] + + return res + + def make_nomedialwall_roi(self, hemi: Literal["L", "R"]) -> dict[str, Path]: + """Makes (no)medialwall ROI by thresholding the surface thickness. + + Args: + hemi: Brain hemisphere. + + Returns: + A dict stores generated files. 
+ + Raises: + FileNotFoundError: Required file is not found. + """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + + # Required files + thickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_thickness.shape.gii" + ) + midthickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + if not thickness_file.is_file(): + raise FileNotFoundError( + f"Metric {thickness_file} not found. Run function 'prepare_native_metric' first." + ) + if not midthickness_file.is_file(): + raise FileNotFoundError( + f"Surface {midthickness_file} not found. " + "Run function 'make_aux_native_surface' first." + ) + + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + gifti_map_name = f"{self.anat_prefix}_hemi-{hemi}_desc-nomedialwall" + + # Make ROI + res = {} + print(f"\n###Create (no)medialwall ROI (hemi-{hemi})###\n", flush=True) + out_file = make_nomedialwall_roi( + thickness_file, midthickness_file, out_file, gifti_map_name=gifti_map_name + ) + + # Record result + res[f"hemi-{hemi}_nomedialwall"] = out_file + self.native[hemi][f"hemi-{hemi}_nomedialwall"] = out_file + + return res + + def calc_native_to_fsLR_registration( + self, + hemi: Literal["L", "R"], + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + msm_config_file: Optional[PathLike] = None, + debug: bool = False, + ) -> dict[str, Path]: + """Calculates registration between native and fsLR space. + + Args: + hemi: Brain hemisphere. + registration_method: Surface-based registration method. If + FS, use FreeSurfer's fsaverage registration to warp + native surfaces to fsLR space. If MSMSulc, calculate + native to fsLR from scratch using MSM program. + msm_config_file: MSMSulc configuration file. Only required + when registration_method is MSMSulc. + debug: If true, output intermediate files. 
+ + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. + """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse registration method + method = parse_registration_method(registration_method) + + # Required files + # native sphere mesh mesh in fsaverage space (generated by FreeSurfer) + in_reg_sphere_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsaverage_den-fsnative_sphere.surf.gii" + ) + # template sphere mesh in fsaverage space + proj_sphere_file = self.template_dir.joinpath( + f"fsaverage_hemi-{hemi}_space-fsaverage_den-164k_sphere.surf.gii" + ) + # template fsaverage sphere mesh in fsLR space + unproj_sphere_file = self.template_dir.joinpath( + f"fsaverage_hemi-{hemi}_space-fsLR_den-164k_sphere.surf.gii" + ) + if not in_reg_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {in_reg_sphere_file} is not found. " + "Run function 'prepare_native_surface' first." + ) + for f in [proj_sphere_file, unproj_sphere_file]: + if not f.is_file(): + raise FileNotFoundError( + f"Sphere mesh {f} is not found. " "Run function 'copy_template_data' first." + ) + # for MSMSulc registration + if method == "MSMSulc": + # native mesh in native space + in_sphere_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_sphere.surf.gii" + ) + # metric sulc in native space + in_sulc_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_sulc.shape.gii" + ) + # template sphere mesh in fsLR space + ref_sphere_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-164k_sphere.surf.gii" + ) + # template metric sulc in fsLR space + ref_sulc_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-164k_sulc.shape.gii" + ) + if not in_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {in_sphere_file} is not found." + "Run function 'prepare_native_surface' first." 
+ ) + if not in_sulc_file.is_file(): + raise FileNotFoundError( + f"Metric {in_sulc_file} is not found. " + "Run function 'prepare_native_metric' first." + ) + if not ref_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {ref_sphere_file} is not found. " + "Run function 'copy_template_data' first." + ) + if not ref_sulc_file.is_file(): + raise FileNotFoundError( + f"Metric {ref_sulc_file} is not found. " + "Run function 'copy_template_data' first." + ) + + # Output + # native mesh in fsLR space + out_fs_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_desc-FS_sphere.surf.gii" + ) + out_msm_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_desc-MSMSulc_sphere.surf.gii" + ) + + # Calculate registration using FreeSurfer method + print( + f"\n###Warp native sphere to fsLR using FreeSurfer's registration (hemi-{hemi})###\n", + flush=True, + ) + res = {} + out_fs_file = warp_native_sphere_to_fsLR( + in_reg_sphere_file, proj_sphere_file, unproj_sphere_file, out_fs_file + ) + res[f"hemi-{hemi}_registration_FS"] = out_fs_file + self.native[hemi][f"hemi-{hemi}_registration_FS"] = out_fs_file + + # Calculate registration using MSMSulc method + if method == "MSMSulc": + print( + f"\n###Run registration to fsLR space using MSMSulc method (hemi-{hemi})###\n", + flush=True, + ) + out_msm_file = calc_native_to_fsLR_registration_MSMSulc( + hemi, + in_sphere_file, + out_fs_file, + in_sulc_file, + ref_sphere_file, + ref_sulc_file, + out_msm_file, + msm_config_file, + debug=debug, + ) + res[f"hemi-{hemi}_registration_MSMSulc"] = out_msm_file + self.native[hemi][f"hemi-{hemi}_registration_MSMSulc"] = out_msm_file + + return res + + def calc_registration_distortion( + self, hemi: Literal["L", "R"], registration_method: Literal["FS", "MSMSulc"] = "MSMSulc" + ) -> dict[str, Path]: + """Calculates registration distortion. + + Args: + hemi: Brain hemisphere. 
+ registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. + """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse registration method + method = parse_registration_method(registration_method) + + # Required files + src_sphere_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_sphere.surf.gii" + ) + warpped_sphere_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_desc-{method}_sphere.surf.gii" + ) + if not src_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {src_sphere_file} is not found. " + "Run function 'prepare_native_surface' first." + ) + if not warpped_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {warpped_sphere_file} is not found. " + "Run function 'calc_native_to_fsLR_registration' first." + ) + + # Calculate 4 types of distortion (Areal, Edge, StrainJ, StrainR) + print(f"\n###Calculate registration distortion measurement (hemi-{hemi})###\n", flush=True) + res = {} + for metric_id in ["Areal", "Edge", "StrainJ", "StrainR"]: + + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + f"desc-{method}-{metric_id}_distortion.shape.gii" + ).as_posix() + gifti_map_name = f"{self.anat_prefix}_hemi-{hemi}_desc-{method}-{metric_id}" + + # Calculate registration distortion + out_file = calc_registration_distortion( + src_sphere_file, + warpped_sphere_file, + out_file, + metric_id, + gifti_map_name=gifti_map_name, + ) + + # Record result + res[f"hemi-{hemi}_distortion-{metric_id}_{method}"] = out_file + self.native[hemi][f"hemi-{hemi}_distortion-{metric_id}_{method}"] = out_file + + return res + + def refine_nomedialwall_roi( + self, hemi: Literal["L", "R"], registration_method: Literal["FS", "MSMSulc"] = "MSMSulc" + ) -> dict[str, Path]: + """Refines (no)medialwall ROI using 
template ROI. + + Args: + hemi: Brain hemisphere. + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. + """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse registration method + method = parse_registration_method(registration_method) + + # Required files + roi_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + warpped_sphere_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_desc-{method}_sphere.surf.gii" + ) + template_roi_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-164k_desc-nomedialwall_probseg.shape.gii" + ) + template_sphere_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-164k_sphere.surf.gii" + ) + if not roi_file.is_file(): + raise FileNotFoundError( + f"Nomedialwall ROI {roi_file} is not found. " + "Run function 'make_nomedialwall_roi' first." + ) + if not warpped_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {warpped_sphere_file} is not found. " + "Run function 'calc_native_to_fsLR_registration' first." + ) + if not template_roi_file.is_file(): + raise FileNotFoundError( + f"Nomedialwall ROI {template_roi_file} is not found. " + "Run function 'copy_template_data' first." + ) + if not template_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {template_sphere_file} is not found. " + "Run function 'copy_template_data' first." 
+ ) + + # Output + out_file = roi_file + + # Refine ROI + print(f"\n###Refine nomedialwall ROI (hemi-{hemi})###\n", flush=True) + res = {} + out_file = refine_nomedialwall_roi( + roi_file, warpped_sphere_file, template_roi_file, template_sphere_file, out_file + ) + + # Record result + res[f"hemi-{hemi}_nomedialwall"] = out_file + self.native[hemi][f"hemi-{hemi}_nomedialwall"] = out_file + + return res + + def mask_metric_nomedialwall(self, hemi: Literal["L", "R"]) -> dict[str, Path]: + """Applies (no)medialwall mask to native surface metric file. + + Args: + hemi: Brain hemisphere. + + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. + """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + + print(f"\n###Apply (no)medialwall ROI mask to metrics (hemi-{hemi})###\n", flush=True) + res = {} + for metric_id in ["curv", "thickness"]: + + # Required files + metric_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_{metric_id}.shape.gii" + ) + roi_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + midthickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + if not metric_file.is_file(): + raise FileNotFoundError( + f"Surface metric {metric_file} is not found. " + "Run function 'prepare_native_metric' first." + ) + if not roi_file.is_file(): + raise FileNotFoundError( + f"Nomedialwall ROI {roi_file} is not found. " + "Run function 'make_nomedialwall_roi' and 'refine_nomedialwall_roi' first." + ) + if not midthickness_file.is_file(): + raise FileNotFoundError( + f"Surface {midthickness_file} is not found. " + "Run function 'make_aux_native_surface' first." 
+                )
+
+            # Output
+            out_file = metric_file
+
+            # Apply (no)medialwall ROI
+            out_file = mask_metric_nomedialwall(metric_file, roi_file, midthickness_file, out_file)
+
+            # Record result
+            res[f"hemi-{hemi}_{metric_id}"] = out_file
+            self.native[hemi][f"hemi-{hemi}_{metric_id}"] = out_file
+
+        return res
+
+
+class ResampleSurface(NativeSurface):
+    """Native to fsLR space processing pipeline."""
+
+    def __init__(
+        self,
+        sub_id: Union[int, str],
+        fs_dir: PathLike,
+        template_dir: PathLike,
+        out_dir: PathLike,
+        ses_id: Optional[str] = None,
+        run_id: Optional[str] = None,
+    ) -> None:
+        """Initializes class.
+
+        Args:
+            sub_id: SubjectID.
+            fs_dir: Subject's FreeSurfer output directory.
+            template_dir: Directory contains required template files.
+            out_dir: Directory to store output file.
+            ses_id: SessionID. Used in the filename prefix. For example,
+                sub-001_ses-01.
+            run_id: RunID. Used in the filename prefix. For example,
+                sub-001_run-1.
+        """
+
+        super().__init__(sub_id, fs_dir, template_dir, out_dir, ses_id=ses_id, run_id=run_id)
+        # Store important result files
+        self.fsLR = {"L": {}, "R": {}}
+
+    def run_resample_fsLR_pipeline(
+        self,
+        hemi: Literal["L", "R"],
+        target_mesh_density: list[str] = ["164k", "32k"],
+        registration_method: Literal["FS", "MSMSulc"] = "MSMSulc",
+        inflate_extra_scale: Union[float, int] = 1.0,
+    ) -> dict[str, Path]:
+        """Runs resample native to fsLR space pipeline.
+
+        Args:
+            hemi: Brain hemisphere.
+            target_mesh_density: A list of surface mesh density which
+                the native space will be resampled to.
+            registration_method: Surface-based registration method.
+            inflate_extra_scale: Extra iteration scaling value. This
+                value is used in function calc_inflation_scale to
+                calculate the final iteration scaling value.
+
+        Returns:
+            A dict stores generated files.
+ """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse target mesh density + for mesh_den in target_mesh_density: + _ = parse_mesh_density(mesh_den, valid_list=["164k", "59k", "32k"]) + + res = {} + # Resample surface mesh to fsLR space (164k, 32k) + for mesh_den in target_mesh_density: + out = self.resample_native_surface_to_fsLR( + hemi, mesh_den, registration_method=registration_method + ) + res.update(out) + # Make inflated and veryinflated surface in fsLR space (164k, 32k) + for mesh_den in target_mesh_density: + out = self.make_aux_fsLR_surface( + hemi, + mesh_den, + registration_method=registration_method, + inflate_extra_scale=inflate_extra_scale, + ) + res.update(out) + # Resample metric data to fsLR space (164k, 32k) + for mesh_den in target_mesh_density: + out = self.resample_native_metric_to_fsLR( + hemi, + mesh_den, + registration_method=registration_method, + ) + res.update(out) + # Resample registration distortion to fsLR space (164k, 32k) + for mesh_den in target_mesh_density: + out = self.resample_native_distortion_to_fsLR( + hemi, + mesh_den, + registration_method=registration_method, + ) + res.update(out) + # Resample atlas to fsLR space (164k, 32k) + for mesh_den in target_mesh_density: + out = self.resample_native_label_to_fsLR( + hemi, + mesh_den, + registration_method=registration_method, + ) + res.update(out) + + return res + + def resample_native_surface_to_fsLR( + self, + hemi: Literal["L", "R"], + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> dict[str, Path]: + """Resamples native surfaces to fsLR space + + Args: + hemi: Brain hemisphere. + mesh_den: Target fsLR space mesh density. + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. 
+ """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den, valid_list=["164k", "59k", "32k"]) + # Parse registration method + method = parse_registration_method(registration_method) + + # Common required files + template_sphere_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_sphere.surf.gii" + ) + if not template_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {template_sphere_file} is not found. " + "Run function 'copy_template_data' first." + ) + + print(f"\n###Resample native surfaces to fsLR space (hemi-{hemi})###\n", flush=True) + res = {} + for surf_id in ["wm", "pial", "midthickness"]: + + # Required files + surf_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_{surf_id}.surf.gii" + ) + warpped_sphere_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_" + f"desc-{method}_sphere.surf.gii" + ) + if not surf_file.is_file(): + raise FileNotFoundError( + f"Surface {surf_file} is not found. " + "Run function 'prepare_native_surface' first." + ) + if not warpped_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {warpped_sphere_file} is not found. " + "Run function 'calc_native_to_fsLR_registration' first." + ) + + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_{surf_id}.surf.gii" + ) + + # Resample surface + print(f"Resampling surface to fsLR {mesh_den} space: {surf_file} ...", flush=True) + out_file = resample_surface( + surf_file, warpped_sphere_file, template_sphere_file, out_file + ) + + # Record result + res[f"hemi-{hemi}_{surf_id}_{mesh_den}_{method}"] = out_file + self.fsLR[hemi][f"hemi-{hemi}_{surf_id}_{mesh_den}_{method}"] = out_file + + # Also create midthickness surface using FS registration if the main method is MSMSulc. 
+ # This surface is required to resample registration distortion files using FS method. + if method != "FS": + + # Required files + surf_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + warpped_sphere_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_desc-FS_sphere.surf.gii" + ) + + if not surf_file.is_file(): + raise FileNotFoundError( + f"Surface {surf_file} is not found. " + "Run function 'prepare_native_surface' first." + ) + if not warpped_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {warpped_sphere_file} is not found. " + "Run function 'calc_native_to_fsLR_registration' first." + ) + + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-FS_midthickness.surf.gii" + ) + + # Resample surface + print(f"Resampling surface to fsLR {mesh_den} space: {surf_file} ...", flush=True) + out_file = resample_surface( + surf_file, warpped_sphere_file, template_sphere_file, out_file + ) + + # Record result + res[f"hemi-{hemi}_{surf_id}_{mesh_den}_FS"] = out_file + self.fsLR[hemi][f"hemi-{hemi}_{surf_id}_{mesh_den}_FS"] = out_file + + return res + + def make_aux_fsLR_surface( + self, + hemi: Literal["L", "R"], + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + inflate_extra_scale: Union[float, int] = 1.0, + ) -> dict[str, Path]: + """Makes inflated surfaces in fsLR space. + + Args: + hemi: Brain hemisphere. + mesh_den: Target fsLR space mesh density. + registration_method: Surface-based registration method. + inflate_extra_scale: Extra iteration scaling value. This + value is used in function calc_inflation_scale to + calculate the final iteration scaling value. + + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. 
+ """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den, valid_list=["164k", "59k", "32k"]) + # Parse registration method + method = parse_registration_method(registration_method) + + # Required files + midthickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_midthickness.surf.gii" + ) + if not midthickness_file.is_file(): + raise FileNotFoundError( + f"Surface {midthickness_file} is not found. " + "Run function 'resample_native_surface_to_fsLR' first." + ) + + # Output + inflated_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_inflated.surf.gii" + ) + veryinflated_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_veryinflated.surf.gii" + ) + + # Make inflated and very inflated surface + print(f"\n###Create inflated surfaces in fsLR space (hemi-{hemi})###\n", flush=True) + res = {} + out_file = make_inflated_surface( + midthickness_file, + inflated_file, + veryinflated_file, + inflate_extra_scale=inflate_extra_scale, + ) + + # Record result + res[f"hemi-{hemi}_inflated_{mesh_den}_{method}"] = out_file[0] + self.fsLR[hemi][f"hemi-{hemi}_inflated_{mesh_den}_{method}"] = out_file[0] + res[f"hemi-{hemi}_veryinflated_{mesh_den}_{method}"] = out_file[1] + self.fsLR[hemi][f"hemi-{hemi}_veryinflated_{mesh_den}_{method}"] = out_file[1] + + return res + + def resample_native_metric_to_fsLR( + self, + hemi: Literal["L", "R"], + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> dict[str, Path]: + """Resamples native surface metric to fsLR space. + + Args: + hemi: Brain hemisphere. + mesh_den: Target fsLR space mesh density. + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. 
+ """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den, valid_list=["164k", "59k", "32k"]) + # Parse registration method + method = parse_registration_method(registration_method) + + # Common required files + warpped_sphere_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_desc-{method}_sphere.surf.gii" + ) + template_sphere_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_sphere.surf.gii" + ) + midthickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + warpped_midthickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_midthickness.surf.gii" + ) + roi_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + template_roi_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_desc-nomedialwall_probseg.shape.gii" + ) + if not warpped_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {warpped_sphere_file} is not found. " + "Run function 'calc_native_to_fsLR_registration' first." + ) + if not template_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {template_sphere_file} is not found. " + "Run function 'copy_template_data' first." + ) + if not midthickness_file.is_file(): + raise FileNotFoundError( + f"Surface {midthickness_file} is not found. " + "Run function 'make_aux_native_surface' first." + ) + if not warpped_midthickness_file.is_file(): + raise FileNotFoundError( + f"Surface {warpped_midthickness_file} is not found. " + "Run function 'resample_native_surface_to_fsLR' first." + ) + if not roi_file.is_file(): + raise FileNotFoundError( + f"Nomedialwall ROI {roi_file} is not found. 
" + "Run function 'make_nomedialwall_roi' and 'refine_nomedialwall_roi' first." + ) + if not template_roi_file.is_file(): + raise FileNotFoundError( + f"Nomedialwall ROI {template_roi_file} is not found. " + "Run function 'copy_template_data' first." + ) + + print(f"\n###Resample metric data to fsLR space (hemi-{hemi})###\n", flush=True) + res = {} + for metric_id in ["sulc", "curv", "thickness"]: + + # Required file + metric_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_{metric_id}.shape.gii" + ) + if not metric_file.is_file(): + raise FileNotFoundError( + f"Surface metric {metric_file} is not found. " + "Run function 'prepare_native_metric' first." + ) + + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_{metric_id}.shape.gii" + ) + + # Resample metric + print(f"Resampling metric to fsLR {mesh_den} space: {metric_file} ...", flush=True) + if metric_id in ["curv", "thickness"]: + out_file = resample_metric( + metric_file, + warpped_sphere_file, + template_sphere_file, + out_file, + current_area_surf_file=midthickness_file, + target_area_surf_file=warpped_midthickness_file, + roi_file=roi_file, + resample_method="ADAP_BARY_AREA", + ) + run_cmd( + "wb_command -disable-provenance -metric-mask " + f"{out_file} {template_roi_file} {out_file}" + ) + if metric_id == "sulc": + out_file = resample_metric( + metric_file, + warpped_sphere_file, + template_sphere_file, + out_file, + current_area_surf_file=midthickness_file, + target_area_surf_file=warpped_midthickness_file, + resample_method="ADAP_BARY_AREA", + ) + + # Record result + res[f"hemi-{hemi}_{metric_id}_{mesh_den}_{method}"] = out_file + self.fsLR[hemi][f"hemi-{hemi}_{metric_id}_{mesh_den}_{method}"] = out_file + + return res + + def resample_native_distortion_to_fsLR( + self, + hemi: Literal["L", "R"], + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> 
dict[str, Path]: + """Resamples native registration distortion to fsLR space. + + Args: + hemi: Brain hemisphere. + mesh_den: Target fsLR space mesh density. + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. + """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den, valid_list=["164k", "59k", "32k"]) + # Parse registration method + # resample FS method registration distortion, if the main method is not + method = parse_registration_method(registration_method) + method_list = [method, "FS"] if method != "FS" else [method] + + # Common required files + warpped_sphere_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_desc-{method}_sphere.surf.gii" + ) + template_sphere_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_sphere.surf.gii" + ) + midthickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + warpped_midthickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_midthickness.surf.gii" + ) + if not warpped_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {warpped_sphere_file} is not found. " + "Run function 'calc_native_to_fsLR_registration' first." + ) + if not template_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {template_sphere_file} is not found. " + "Run function 'copy_template_data' first." + ) + if not midthickness_file.is_file(): + raise FileNotFoundError( + f"Surface {midthickness_file} is not found. " + "Run function 'make_aux_native_surface' first." + ) + if not warpped_midthickness_file.is_file(): + raise FileNotFoundError( + f"Surface {warpped_midthickness_file} is not found. 
" + "Run function 'resample_native_surface_to_fsLR' first." + ) + + print( + f"\n###Resample registration distortion to fsLR space (hemi-{hemi})###\n", flush=True + ) + res = {} + for method in method_list: + for metric_id in ["Areal", "Edge", "StrainJ", "StrainR"]: + + # Required file + metric_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + f"desc-{method}-{metric_id}_distortion.shape.gii" + ) + if not metric_file.is_file(): + raise FileNotFoundError( + f"Distortion metric {metric_file} is not found. " + "Run function 'calc_registration_distortion' first." + ) + + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}-{metric_id}_distortion.shape.gii" + ) + + # Resample metric + print(f"Resampling metric to fsLR {mesh_den} space: {metric_file} ...", flush=True) + out_file = resample_metric( + metric_file, + warpped_sphere_file, + template_sphere_file, + out_file, + current_area_surf_file=midthickness_file, + target_area_surf_file=warpped_midthickness_file, + resample_method="ADAP_BARY_AREA", + ) + + # Record result + res[f"hemi-{hemi}_{metric_id}_{mesh_den}_{method}"] = out_file + self.fsLR[hemi][f"hemi-{hemi}_{metric_id}_{mesh_den}_{method}"] = out_file + + return res + + def resample_native_label_to_fsLR( + self, + hemi: Literal["L", "R"], + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> dict[str, Path]: + """Resamples native surface annotation to fsLR space. + + Args: + hemi: Brain hemisphere. + mesh_den: Target fsLR space mesh density. + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. 
+ """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den, valid_list=["164k", "59k", "32k"]) + # Parse registration method + method = parse_registration_method(registration_method) + + # Common required files + warpped_sphere_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_desc-{method}_sphere.surf.gii" + ) + template_sphere_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_sphere.surf.gii" + ) + midthickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + warpped_midthickness_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_midthickness.surf.gii" + ) + if not warpped_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {warpped_sphere_file} is not found. " + "Run function 'calc_native_to_fsLR_registration' first." + ) + if not template_sphere_file.is_file(): + raise FileNotFoundError( + f"Sphere mesh {template_sphere_file} is not found. " + "Run function 'copy_template_data' first." + ) + if not midthickness_file.is_file(): + raise FileNotFoundError( + f"Surface {midthickness_file} is not found. " + "Run function 'make_aux_native_surface' first." + ) + if not warpped_midthickness_file.is_file(): + raise FileNotFoundError( + f"Surface {warpped_midthickness_file} is not found. " + "Run function 'resample_native_surface_to_fsLR' first." + ) + + print(f"\n###Resample atlas to fsLR space (hemi-{hemi})###\n", flush=True) + res = {} + for atlas_id in ["Aparc", "Destrieux", "DKT"]: + + # Required file + atlas_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + f"desc-{atlas_id}_dseg.label.gii" + ) + if not atlas_file.is_file(): + raise FileNotFoundError( + f"Atlas {atlas_file} is not found. 
" + "Run function 'prepare_native_annotation' first." + ) + + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}-{atlas_id}_dseg.label.gii" + ) + + # Resample label + print(f"Resampling atlas to fsLR {mesh_den} space: {atlas_file} ...", flush=True) + out_file = resample_label( + atlas_file, + warpped_sphere_file, + template_sphere_file, + out_file, + current_area_surf_file=midthickness_file, + target_area_surf_file=warpped_midthickness_file, + resample_method="ADAP_BARY_AREA", + ) + + # Record result + res[f"hemi-{hemi}_{atlas_id}_{mesh_den}_{method}"] = out_file + self.fsLR[hemi][f"hemi-{hemi}_{atlas_id}_{mesh_den}_{method}"] = out_file + + return res + + +class FreeSurferVolume(ResampleSurface): + """FreeSurfer volume processing pipeline.""" + + def __init__( + self, + sub_id: Union[int, str], + fs_dir: PathLike, + template_dir: PathLike, + out_dir: PathLike, + ses_id: Optional[str] = None, + run_id: Optional[str] = None, + ) -> None: + """Initializes class. + + Args: + sub_id: SubjectID. + fs_dir: Subject's FreeSurfer output directory. + template_dir: Directory contains required template files. + out_dir: Directory to store output file. + ses_id: SessionID. Used in the filename prefix. For example, + sub-001_ses-01. + run_id: RunID. Used in the filename prefix. For example, + sub-001_run-1. + """ + + super().__init__(sub_id, fs_dir, template_dir, out_dir, ses_id=ses_id, run_id=run_id) + # Store important result files + self.volume = {} + + def run_volume_pipeline( + self, + xfm_file: PathLike, + ref_file: PathLike, + xfm_mni_file: PathLike, + ref_mni_file: PathLike, + lut_file: PathLike, + lut_subcortical_file: PathLike, + ) -> dict[str, Path]: + """Runs FreeSurfer volume pipeline. + + Args: + xfm_file: An ITK format affine transformation matrix file. + Usually it is used to adjust the difference between the + original and FreeSurfer conformed T1w images. 
+ ref_file: Reference volume file for xfm_file. + xfm_mni_file: An ITK format nonlinear transformation matrix + file. It transforms image in T1w space to MNI152NLin6Asym + space. + ref_mni_file: Reference volume file for xfm_mni_file. + lut_file: Lut file contains label information of + FreeSurfer's parcellations. It is used to import label + information to parcellation NIFTI image header. + lut_subcortical_file: Lut file contains label information of + FreeSurfer's subcortical segmentation. It is used to + import label information to parcellation NIFTI image + header. + + Returns: + A dict stores generated files. + + Raises: + ValueError: The ref_mni_file is not in MNI152NLin6Asym_res-2 + space. + """ + + res = {} + # Convert FreeSurfer's volume to NIFTI format + out = self.convert_freesurfer_volume( + xfm_file=xfm_file, ref_file=ref_file, lut_file=lut_file + ) + res.update(out) + # Make brainmask + out = self.make_brainmask_from_wmparc() + res.update(out) + # Warp subcortical ROI to MNI space + # check MNI reference file + if not "MNI152NLin6Asym_res-2" in Path(ref_mni_file).name: + raise ValueError("MNI reference file should be in MNI152NLin6Asym_res-2 space.") + out = self.warp_subcortical_roi( + xfm_mni_file, + ref_mni_file, + lut_file=lut_subcortical_file, + space="MNI152NLin6Asym_res-2", + ) + res.update(out) + + return res + + def convert_freesurfer_volume( + self, + xfm_file: Optional[PathLike] = None, + ref_file: Optional[PathLike] = None, + lut_file: Optional[PathLike] = None, + ) -> dict[str, Path]: + """Converts FreeSurfer's volume to NIFTI format. + + Note: FreeSurfer's internal anatomical image is in a conformed + space. This space might have small differences to the input T1w + image. This could be mitigated by using the fsnative to T1w + transformation matrix generated by fMRIPrep. + + Args: + xfm_file: An ITK format affine transformation matrix file. + If it is given, applying it to the volume file. + Optional. 
+ ref_file: Reference volume file for xfm_file. Optional. + lut_file: Lut file contains label information of + FreeSurfer's parcellations. It is used to import label + information to parcellation NIFTI image header. + Optional. + + Returns: + A dict stores generated files. + """ + + print(f"\n###Convert FreeSurfer's volume###\n", flush=True) + volume_list = ["T1", "wmparc", "aparc.a2009s+aseg", "aparc+aseg", "aparc.DKTatlas+aseg"] + res = {} + for volume_id in volume_list: + out_file = convert_freesurfer_volume( + self.sub_id, + volume_id, + self.fs_dir, + self.out_dir, + xfm_file=xfm_file, + ref_file=ref_file, + lut_file=lut_file, + ) + # rename output file + out_file = out_file.rename( + Path(self.out_dir, out_file.name.replace(f"sub-{self.sub_id}", self.anat_prefix)) + ) + res[volume_id], self.volume[volume_id] = out_file, out_file + + return res + + def make_brainmask_from_wmparc(self) -> dict[str, Path]: + """Makes brainmask from FreeSurfer's wmparc file.""" + + # Required file + atlas_file = self.out_dir.joinpath(f"{self.anat_prefix}_space-T1w_desc-WMParc_dseg.nii.gz") + if not atlas_file.is_file(): + raise FileNotFoundError( + f"File {atlas_file} is not found. " "Run 'convert_freesurfer_volume' first." + ) + # Output + out_file = self.out_dir.joinpath(f"{self.anat_prefix}_space-T1w_desc-brain-FS_mask.nii.gz") + # Make brainmask + print(f"\n###Make brainmask from WMParc###\n", flush=True) + res = {} + out_file = make_brainmask_from_atlas(atlas_file, out_file) + res["brainmask"], self.volume["brainmask"] = out_file, out_file + + return res + + def warp_subcortical_roi( + self, + xfm_file: PathLike, + ref_file: PathLike, + lut_file: Optional[PathLike] = None, + space: str = "MNI152NLin6Asym_res-2", + ) -> dict[str, Path]: + """Warps FreeSurfer's segmentation to target space. + + Note: FreeSurfer's interal anatomical image is in a conformed + space. This space might has small differences to the input T1w + image. 
This could be mitigate by using the fsnative to T1w + transformation matrix generated by fMRIPrep. + + Args: + xfm_file: An ITK format affine transformation matrix file. + If it is given, applying it to the volume file. + ref_file: Reference volume file for xfm_file. + lut_file: Lut file contains label information of + FreeSurfer's parcellations. It is used to import label + information to parcellation NIFTI image header. + Optional. + space: Spatial space of the output file. It should match the + given transformation xfm_file. + + Returns: + A dict stores generated files. + + Raises: + FileNotFoundError: Required file is not found. + """ + + # Required file + atlas_file = self.out_dir.joinpath(f"{self.anat_prefix}_space-T1w_desc-WMParc_dseg.nii.gz") + if not atlas_file.is_file(): + raise FileNotFoundError( + f"File {atlas_file} is not found. " "Run 'convert_freesurfer_volume' first." + ) + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_space-{space}_desc-Subcortical_dseg.nii.gz" + ) + # Warp file + print(f"\n###Warp subcortical ROI to {space} space###\n", flush=True) + res = {} + out_file = warp_atlas_to_reference( + atlas_file, out_file, xfm_file, ref_file, lut_file=lut_file + ) + res[f"Subcortrcal_{space}"], self.volume[f"Subcortrcal_{space}"] = out_file, out_file + # Copy reference file to output directory + shutil.copy(ref_file, self.out_dir.joinpath(ref_file.name)) + + return res + + +class Anatomical(FreeSurferVolume): + """Surface-based anatomical processing pipeline.""" + + def __init__( + self, + sub_id: Union[int, str], + fs_dir: PathLike, + template_dir: PathLike, + out_dir: PathLike, + ses_id: Optional[str] = None, + run_id: Optional[str] = None, + ) -> None: + """Initializes class. + + Args: + sub_id: SubjectID. + fs_dir: Subject's FreeSurfer output directory. + template_dir: Directory contains required template files. + out_dir: Directory to store output file. + ses_id: SessionID. Used in the filename prefix. 
For example, + sub-001_ses-01. + run_id: RunID. Used in the filename prefix. For example, + sub-001_run-1. + """ + + super().__init__(sub_id, fs_dir, template_dir, out_dir, ses_id=ses_id, run_id=run_id) + # Store important result files + self.cifti = {} + + def run_anatomical_pipeline( + self, + xfm_file: PathLike, + ref_file: PathLike, + xfm_mni_file: PathLike, + ref_mni_file: PathLike, + keep_gifti_native: bool = False, + keep_gifti_fsLR: bool = False, + lut_file: Optional[PathLike] = None, + lut_subcortical_file: Optional[PathLike] = None, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + msm_config_file: Optional[PathLike] = None, + inflate_extra_scale: Union[float, int] = 1.0, + debug: bool = False, + ) -> dict[str, Path]: + """Runs full surface-based anatomical process pipeline. + + Args: + xfm_file: An ITK format affine transformation matrix file. + Usually it is used to adjust the difference between the + original and FreeSurfer conformed T1w images. + ref_file: Reference volume file for xfm_file. + xfm_mni_file: An ITK format nonlinear transformation matrix + file. It transforms image in T1w space to MNI152NLin6Asym + space. + ref_mni_file: Reference volume file for xfm_mni_file. + keep_gifti_native: If true, keep native space GIFTI files. + keep_gifti_fsLR: If true, keep fsLR space GIFTI files. + lut_file: Lut file contains label information of + FreeSurfer's parcellations. It is used to import label + information to parcellation NIFTI image header. + lut_subcortical_file: Lut file contains label information of + FreeSurfer's subcortical segmentation. It is used to + import label information to parcellation NIFTI image + header. + registration_method: Surface-based registration method. + msm_config_file: MSMSulc configuration file. Only required + when registration_method is MSMSulc. + inflate_extra_scale: Extra iteration scaling value. This + value is used in function calc_inflation_scale to + calculate the final iteration scaling value. 
+ debug: If true, output intermediate files. + + Returns: + A dict stores generated files. + """ + + # Parse inputs + if lut_file is None: + lut_file = self.template_dir.joinpath("FreeSurferAllLut.txt") + if lut_subcortical_file is None: + lut_subcortical_file = self.template_dir.joinpath( + "FreeSurferSubcorticalLabelTableLut.txt" + ) + if msm_config_file is None: + msm_config_file = self.template_dir.joinpath("MSMSulcStrainFinalconf") + if not msm_config_file.is_file(): + raise FileNotFoundError("MSMSulc config!") + + print(f"Starting anatomical pipeline: sub-{self.sub_id}!", flush=True) + + res = {} + # Check common template data in template_dir + self.check_template_data() + # Native surface pipeline + for hemi in ["L", "R"]: + out = self.run_native_space_pipeline( + hemi, + xfm_file=xfm_file, + registration_method=registration_method, + msm_config_file=msm_config_file, + inflate_extra_scale=inflate_extra_scale, + debug=debug, + ) + res.update(out) + # Resample pipeline + for hemi in ["L", "R"]: + out = self.run_resample_fsLR_pipeline( + hemi, + target_mesh_density=["164k", "32k"], + registration_method=registration_method, + inflate_extra_scale=inflate_extra_scale, + ) + res.update(out) + # Volume pipeline + out = self.run_volume_pipeline( + xfm_file, ref_file, xfm_mni_file, ref_mni_file, lut_file, lut_subcortical_file + ) + res.update(out) + # CIFTI file pipeline + out = self.run_cifti_pipeline(registration_method=registration_method) + res.update(out) + # Make spec file for HCP workbench + _ = self.make_spec_file("fsnative", "fsnative") + _ = self.make_spec_file("fsLR", "164k") + _ = self.make_spec_file("fsLR", "32k") + # Cleanup + self.remove_unnecessary_file( + keep_gifti_native=keep_gifti_native, keep_gifti_fsLR=keep_gifti_fsLR + ) + + print(f"\nAnatomical pipeline finished!\n", flush=True) + + return res + + def run_cifti_pipeline( + self, registration_method: Literal["FS", "MSMSulc"] = "MSMSulc" + ) -> dict[str, Path]: + """Runs CIFTI file pipeline. 
+ + Args: + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + """ + + res = {} + # Make metric data dscalar CIFTI file + for space, mesh_den in [("fsnative", "fsnative"), ("fsLR", "164k"), ("fsLR", "32k")]: + out = self.make_metric_cifti(space, mesh_den, registration_method=registration_method) + res.update(out) + # Make registration distortion dscalar CIFTI file + for space, mesh_den in [("fsnative", "fsnative"), ("fsLR", "164k"), ("fsLR", "32k")]: + out = self.make_distortion_cifti( + space, mesh_den, registration_method=registration_method + ) + res.update(out) + # Make atlas dlabel CIFTI file + for space, mesh_den in [("fsnative", "fsnative"), ("fsLR", "164k"), ("fsLR", "32k")]: + out = self.make_label_cifti(space, mesh_den, registration_method=registration_method) + res.update(out) + + return res + + def make_metric_cifti( + self, + space: Literal["fsLR", "fsnative"], + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> dict[str, Path]: + """Makes metric dense scalar CIFTI file. + + Args: + space: Surface space. + mesh_den: Surface mesh density. + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + ValueError: mesh_den is not fsnative when space is fsnative. 
+ """ + + # Parse surface space + space = parse_space(space, valid_list=["fsLR", "fsnative"]) + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den) + if space == "fsnative" and mesh_den != "fsnative": + raise ValueError("If surface space is fsnative, mesh_den should be fsnative as well") + # Parse registration method + method = parse_registration_method(registration_method) + # Addition filename modifier + desc = "" if space == "fsnative" else f"_desc-{method}" + + print( + f"\n###Make metric dscalar CIFTI file (space-{space}_den-{mesh_den})###\n", flush=True + ) + res = {} + for metric_id in ["sulc", "curv", "thickness"]: + + # Required files + left_surf_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-L_space-{space}_den-{mesh_den}{desc}_{metric_id}.shape.gii" + ) + right_surf_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-R_space-{space}_den-{mesh_den}{desc}_{metric_id}.shape.gii" + ) + if space == "fsnative": + left_roi_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-L_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + right_roi_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-R_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + else: + left_roi_file = self.template_dir.joinpath( + f"{space}_hemi-L_space-{space}_den-{mesh_den}_" + "desc-nomedialwall_probseg.shape.gii" + ) + right_roi_file = self.template_dir.joinpath( + f"{space}_hemi-R_space-{space}_den-{mesh_den}_" + "desc-nomedialwall_probseg.shape.gii" + ) + + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_space-{space}_den-{mesh_den}{desc}_{metric_id}.dscalar.nii" + ) + cifti_map_name = f"{self.anat_prefix}_space-{space}_den-{mesh_den}{desc}_{metric_id}" + + # Make dense scalar + if metric_id == "sulc": + out_file = make_dense_scalar( + out_file, + left_surf_file=left_surf_file, + right_surf_file=right_surf_file, + cifti_map_name=cifti_map_name, + ) + else: + out_file = 
make_dense_scalar( + out_file, + left_surf_file=left_surf_file, + right_surf_file=right_surf_file, + left_roi_file=left_roi_file, + right_roi_file=right_roi_file, + cifti_map_name=cifti_map_name, + ) + + # Set map palette + if metric_id == "thickness": + run_cmd( + f"wb_command -disable-provenance -cifti-palette {out_file} " + f"MODE_AUTO_SCALE_PERCENTAGE {out_file} -pos-percent 4 96 -interpolate true " + "-palette-name videen_style -disp-pos true -disp-neg false -disp-zero false" + ) + else: + run_cmd( + f"wb_command -disable-provenance -cifti-palette {out_file} " + f"MODE_AUTO_SCALE_PERCENTAGE {out_file} -pos-percent 2 98 " + "-palette-name Gray_Interp -disp-pos true -disp-neg true -disp-zero true" + ) + + # Record result + if space == "fsnative": + res[metric_id], self.cifti[metric_id] = out_file, out_file + else: + res[f"{space}_{mesh_den}_{metric_id}"] = out_file + self.cifti[f"{space}_{mesh_den}_{metric_id}"] = out_file + + return res + + def make_distortion_cifti( + self, + space: Literal["fsLR", "fsnative"], + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> dict[str, Path]: + """Makes registration distortion dense scalar CIFTI file. + + Args: + space: Surface space. + mesh_den: Surface mesh density. + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + ValueError: mesh_den is not fsnative when space is fsnative. 
+ """ + + # Parse surface space + space = parse_space(space, valid_list=["fsLR", "fsnative"]) + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den) + if space == "fsnative" and mesh_den != "fsnative": + raise ValueError("If surface space is fsnative, mesh_den should be fsnative as well") + # Parse registration method + # also make FS method registration distortion file, if the main method is not + method = parse_registration_method(registration_method) + method_list = [method, "FS"] if method != "FS" else [method] + + print( + "\n###Make registration distortion dscalar CIFTI file " + f"(space-{space}_den-{mesh_den})###\n", + flush=True, + ) + res = {} + for method in method_list: + for metric_id in ["Areal", "Edge", "StrainJ", "StrainR"]: + + # Required files + left_surf_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-L_space-{space}_den-{mesh_den}_" + f"desc-{method}-{metric_id}_distortion.shape.gii" + ) + right_surf_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-R_space-{space}_den-{mesh_den}_" + f"desc-{method}-{metric_id}_distortion.shape.gii" + ) + + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_space-{space}_den-{mesh_den}_" + f"desc-{method}-{metric_id}_distortion.dscalar.nii" + ) + cifti_map_name = ( + f"{self.anat_prefix}_space-{space}_den-{mesh_den}_desc-{method}-{metric_id}" + ) + + # Make dense scalar + out_file = make_dense_scalar( + out_file, + left_surf_file=left_surf_file, + right_surf_file=right_surf_file, + cifti_map_name=cifti_map_name, + ) + + # Set map palette + run_cmd( + f"wb_command -disable-provenance -cifti-palette {out_file} " + f"MODE_USER_SCALE {out_file} -pos-user 0 1 -neg-user 0 -1 -interpolate true " + "-palette-name ROY-BIG-BL -disp-pos true -disp-neg true -disp-zero false" + ) + + # Record result + res[f"{space}_{mesh_den}_{method}-{metric_id}"] = out_file + self.cifti[f"{space}_{mesh_den}_{method}-{metric_id}"] = out_file + + return res + + def make_label_cifti( + self, 
+ space: Literal["fsLR", "fsnative"], + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> dict[str, Path]: + """Makes FreeSurfer's annotation dense label CIFTI file. + + Args: + space: Surface space. + mesh_den: Surface mesh density. + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + ValueError: mesh_den is not fsnative when space is fsnative. + """ + + # Parse surface space + space = parse_space(space, valid_list=["fsLR", "fsnative"]) + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den) + if space == "fsnative" and mesh_den != "fsnative": + raise ValueError("If surface space is fsnative, mesh_den should be fsnative as well") + # Parse registration method + method = parse_registration_method(registration_method) + # Addition filename modifier + desc = "_desc" if space == "fsnative" else f"_desc-{method}" + + print(f"\n###Make atlas dlabel CIFTI file (space-{space}_den-{mesh_den})###\n", flush=True) + res = {} + for atlas_id in ["Aparc", "Destrieux", "DKT"]: + + # Required files + if space == "fsnative": + left_surf_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-L_space-{space}_den-{mesh_den}_" + f"desc-{atlas_id}_dseg.label.gii" + ) + right_surf_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-R_space-{space}_den-{mesh_den}_" + f"desc-{atlas_id}_dseg.label.gii" + ) + left_roi_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-L_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + right_roi_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-R_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + else: + left_surf_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-L_space-{space}_den-{mesh_den}_" + f"desc-{method}-{atlas_id}_dseg.label.gii" + ) + right_surf_file = self.out_dir.joinpath( + f"{self.anat_prefix}_hemi-R_space-{space}_den-{mesh_den}_" + 
f"desc-{method}-{atlas_id}_dseg.label.gii" + ) + left_roi_file = self.template_dir.joinpath( + f"{space}_hemi-L_space-{space}_den-{mesh_den}_" + "desc-nomedialwall_probseg.shape.gii" + ) + right_roi_file = self.template_dir.joinpath( + f"{space}_hemi-R_space-{space}_den-{mesh_den}_" + "desc-nomedialwall_probseg.shape.gii" + ) + + # Output + out_file = self.out_dir.joinpath( + f"{self.anat_prefix}_space-{space}_den-{mesh_den}{desc}-{atlas_id}_dseg.dlabel.nii" + ) + cifti_map_name = f"{self.anat_prefix}_space-{space}_den-{mesh_den}{desc}-{atlas_id}" + + # Make dense label + out_file = make_dense_label( + out_file, + left_surf_file=left_surf_file, + right_surf_file=right_surf_file, + left_roi_file=left_roi_file, + right_roi_file=right_roi_file, + cifti_map_name=cifti_map_name, + ) + + # Record result + if space == "fsnative": + res[atlas_id], self.cifti[atlas_id] = out_file, out_file + else: + res[f"{space}_{mesh_den}_{method}-{atlas_id}"] = out_file + self.cifti[f"{space}_{mesh_den}_{method}-{atlas_id}"] = out_file + + return res + + def make_spec_file( + self, + space: Literal["fsLR", "fsnative"], + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> Path: + """Makes spec file for HCP Workbench. + + Args: + space: Surface space. + mesh_den: Surface mesh density. + registration_method: Surface-based registration method. + + Returns: + A HCP workbench spec file. 
+ """ + + # Parse surface space + space = parse_space(space, valid_list=["fsLR", "fsnative"]) + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den) + if space == "fsnative" and mesh_den != "fsnative": + raise ValueError("If surface space is fsnative, mesh_den should be fsnative as well") + + # Output + spec_file = self.out_dir.joinpath( + f"{self.anat_prefix}_space-{space}_den-{mesh_den}.wb.spec" + ) + spec_file.unlink(missing_ok=True) + + print(f"\n###Create spec file (space-{space}_den-{mesh_den})###\n", flush=True) + + # Surface and (no)medialwall ROI + for hemi, structure in [("L", "CORTEX_LEFT"), ("R", "CORTEX_RIGHT")]: + file_list = [] + file_list += sorted( + self.out_dir.glob(f"sub-*_hemi-{hemi}_space-{space}_den-{mesh_den}*.surf.gii") + ) + file_list += list( + self.out_dir.glob( + f"sub-*_hemi-{hemi}_space-{space}_den-{mesh_den}*probseg.shape.gii" + ) + ) + # exclude surfaces used FS registration method if it's not the main method + if registration_method != "FS": + file_list = [f for f in file_list if "desc-FS" not in f.name] + for f in file_list: + run_cmd(f"wb_command -add-to-spec-file {spec_file} {structure} {f}") + # Metric data, atlas + file_list = [] + for metric_id in ["sulc", "curv", "thickness"]: + file_list += list( + self.out_dir.glob(f"sub-*_space-{space}_den-{mesh_den}*{metric_id}.dscalar.nii") + ) + file_list += sorted( + self.out_dir.glob(f"sub-*_space-{space}_den-{mesh_den}*_dseg.dlabel.nii") + ) + for f in file_list: + run_cmd(f"wb_command -add-to-spec-file {spec_file} INVALID {f}") + + if space == "fsLR": + for hemi, structure in [("L", "CORTEX_LEFT"), ("R", "CORTEX_RIGHT")]: + file_list = [] + # Template surface + file_list += sorted( + self.template_dir.glob( + f"fsLR_hemi-{hemi}_space-{space}_den-{mesh_den}*.surf.gii" + ) + ) + # Nomedialwall ROI + file_list += list( + self.template_dir.glob( + f"fsLR_hemi-{hemi}_space-{space}_den-{mesh_den}*probseg.shape.gii" + ) + ) + for f in file_list: + run_cmd(f"wb_command 
-add-to-spec-file {spec_file} {structure} {f}") + # Atlas (only 32k) + if mesh_den == "32k": + file_list = [] + file_list += sorted( + self.template_dir.glob(f"fsLR_space-fsLR_den-32k*_dseg.dlabel.nii") + ) + for f in file_list: + run_cmd(f"wb_command -add-to-spec-file {spec_file} INVALID {f}") + # Volume + file_list = [] + file_list += sorted(self.out_dir.glob("sub-*_space-MNI152NLin6Asym*.nii.gz")) + file_list += sorted(self.template_dir.glob("MNI*_space-MNI152NLin6Asym*.nii.gz")) + for f in file_list: + run_cmd(f"wb_command -add-to-spec-file {spec_file} INVALID {f}") + else: + # Volume + file_list = [] + file_list += sorted(self.out_dir.glob("sub-*_space-T1w*.nii.gz")) + for f in file_list: + run_cmd(f"wb_command -add-to-spec-file {spec_file} INVALID {f}") + + return spec_file + + def remove_unnecessary_file( + self, keep_gifti_native: bool = False, keep_gifti_fsLR: bool = False + ) -> None: + """Removes intermediate files. + + Args: + keep_gifti_native: If ture, keep native space GIFTI files. + keep_gifti_fsLR: If true, keep fsLR space GIFTI files. 
+ """ + + if not (keep_gifti_native and keep_gifti_fsLR): + print("\n###Cleanup unnecessary file###\n", flush=True) + + # fsnative + if not keep_gifti_native: + file_list = [] + for _, f in {**self.native["L"], **self.native["R"]}.items(): + # metric + if ".shape.gii" in Path(f).name: + # exclude (no)medialwall ROI + if "probseg.shape.gii" not in Path(f).name: + file_list.append(f) + # label + if "dseg.label.gii" in Path(f).name: + file_list.append(f) + for f in file_list: + print(f"Cleaning {f.name} ...", flush=True) + f.unlink() + + # fsLR + if not keep_gifti_fsLR: + file_list = [] + for _, f in {**self.fsLR["L"], **self.fsLR["R"]}.items(): + # metric + if ".shape.gii" in Path(f).name: + file_list.append(f) + # label + if "dseg.label.gii" in Path(f).name: + file_list.append(f) + for f in file_list: + print(f"Cleaning {f.name} ...", flush=True) + f.unlink() + + +class NativeSurfaceFunc: + """Native space surface-based functional processing pipeline.""" + + def __init__( + self, + sub_id: Union[int, str], + surf_dir: PathLike, + out_dir: PathLike, + anat_ses_id: Optional[str] = None, + anat_run_id: Optional[str] = None, + ) -> None: + """Initializes class. + + Args: + sub_id: SubjectID. + surf_dir: Surface files directory. Usually it is the output + directory of the Anatomical pipeline. + out_dir: Directory to store output file. + anat_ses_id: Anatomical image SessionID. It is used for + selecting surfaces generated by Anatomical pipeline. + anat_run_id: Anatomical image RunID. It is used for + selecting surfaces generated by Anatomical pipeline. + """ + + ############# + # Directories + ############# + self.sub_id = conform_sub_id(sub_id, with_prefix=False) + self.surf_dir = Path(surf_dir) + self.out_dir = Path(out_dir) + self.out_dir.mkdir(exist_ok=True, parents=True) + # regex for BIDS complaint functional file + self.func_regex = r"sub-[^\W_]+_(ses-[^\W_]+)?_?task-[^\W_]+_(run-\d+)?_?" 
+ # Anatomical filename prefix + self.anat_prefix = f"sub-{self.sub_id}" + if anat_ses_id: + self.anat_prefix += f"_ses-{anat_ses_id}" + if anat_run_id: + self.anat_prefix += f"_run-{anat_run_id}" + # Store important result files + self.volume = {} + self.native = {"L": {}, "R": {}} + + def run_native_space_func_pipeline( + self, + func_file: PathLike, + timestep: Union[float, int], + ref_file: PathLike, + smoothing_fwhm: Optional[list[Union[float, int]]] = None, + timestep_format: str = ":.1f", + grey_ribbon_value: int = 1, + neighborhood_smoothing: Union[float, int] = 5, + ci_limit: Union[float, int] = 0.5, + dilate_distance: Optional[Union[float, int]] = 10, + debug: bool = False, + ) -> dict[str, Path]: + """Runs native space surface-based functional data pipeline. + + Args: + func_file: Functional image file. + timestep: The temporal interval of consecutive time points + in the func_file. Usually it's the repetition time of + the functional image. + ref_file: Volume image file used as reference of generated + cortical ribbon file. + smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). + timestep_format: Float number format in the map name. + grey_ribbon_value: Index value of the ribbon voxels. See + function 'make_cortical_ribbon'. + neighborhood_smoothing: Spatial smoothing kernal sigma + (FWHM, mm) for finding good voxels. See function + 'make_good_voxel_mask'. + ci_limit: Parameter to control the good voxel threshold. + Smaller value relates to stricter threshold. See + function 'make_good_voxel_mask'. + dilate_distance: Dilate distance (mm) applies to surface + sampled functional data. + + Returns: + A dict stores generated files. + + Raises: + ValueError: The func_file name is not BIDS-compliant. 
+ """ + + # Parse filename prefix + func_prefix = re.search(self.func_regex, Path(func_file).name) + if func_prefix is None: + raise ValueError("The func_file name is not BIDS-compliant.") + func_prefix = func_prefix.group() + # Parse smoothing fwhm + smoothing_fwhm = parse_smoothing_fwhm(smoothing_fwhm) + + res = {} + # Make cortical ribbon mask + print("\n###Make cortical ribbon mask###\n", flush=True) + out = self.make_cortical_ribbon(ref_file, grey_ribbon_value=grey_ribbon_value, debug=debug) + res.update(out) + # Make good voxel mask + print("\n###Make good voxel mask###\n", flush=True) + out = self.make_good_voxel_mask( + func_file, + neighborhood_smoothing=neighborhood_smoothing, + ci_limit=ci_limit, + debug=debug, + ) + res.update(out) + # Sample functional data to surface + print("\n###Sample functional data to native surface###\n", flush=True) + for hemi in ["L", "R"]: + out = self.sample_func_to_surface( + hemi, + func_file, + timestep, + dilate_distance=dilate_distance, + timestep_format=timestep_format, + ) + res.update(out) + # Smoothing functional data in native space + if smoothing_fwhm is not None: + print("\n###Smooth functional data on native surface###\n", flush=True) + for fwhm in smoothing_fwhm: + for hemi in ["L", "R"]: + out = self.smooth_native_func(hemi, func_prefix, fwhm) + res.update(out) + # Make diagnositic metric + print("\n###Make diagnositic metric on native surface###\n", flush=True) + for hemi in ["L", "R"]: + out = self.make_diagnositic_metric(hemi, func_file, dilate_distance=dilate_distance) + res.update(out) + + return res + + def make_cortical_ribbon( + self, ref_file: PathLike, grey_ribbon_value: int = 1, debug: bool = False + ) -> dict[str, Path]: + """Make cortical ribbon volume from white and pial surface. + + Args: + ref_file: Volume image file used as reference of generated + cortical ribbon file. + grey_ribbon_value: Index value of the ribbon voxels. + debug: If true, output intermediate files. 
+ + Returns: + A dict stores generated files. + + Raises: + ValueError: The ref_file name is not BIDS-compliant. + FileNotFoundError: Required file is not found. + """ + + # Parse filename prefix + volume_prefix = re.search( + self.func_regex + "space-[^\W_]+(_res-[^\W_]+)?_?", Path(ref_file).name + ) + if volume_prefix is None: + raise ValueError("The ref_file name is not BIDS-compliant.") + volume_prefix = volume_prefix.group() + func_prefix = re.search(self.func_regex, Path(ref_file).name).group() + + # Required file + left_wm_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-L_space-fsnative_den-fsnative_wm.surf.gii" + ) + left_pial_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-L_space-fsnative_den-fsnative_pial.surf.gii" + ) + right_wm_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-R_space-fsnative_den-fsnative_wm.surf.gii" + ) + right_pial_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-R_space-fsnative_den-fsnative_pial.surf.gii" + ) + for f in [left_wm_file, left_pial_file, right_wm_file, right_pial_file]: + if not f.is_file(): + raise FileNotFoundError(f"Surface {f} not found. Run pipeline 'Anatomical' first.") + + # Output + out_file = self.out_dir.joinpath(volume_prefix + "desc-ribbon_mask.nii.gz") + + # Make ribbon volume + res = {} + print(f"Creating cortical ribbon: {ref_file} ...", flush=True) + out_file = make_cortical_ribbon( + ref_file, + out_file, + left_wm_file=left_wm_file, + left_pial_file=left_pial_file, + right_wm_file=right_wm_file, + right_pial_file=right_pial_file, + grey_ribbon_value=grey_ribbon_value, + debug=debug, + ) + res[f"{func_prefix}ribbon_mask"] = out_file + self.volume[f"{func_prefix}ribbon_mask"] = out_file + + return res + + def make_good_voxel_mask( + self, + func_file: PathLike, + neighborhood_smoothing: Union[float, int] = 5, + ci_limit: Union[float, int] = 0.5, + debug: bool = False, + ) -> dict[str, Path]: + """Makes a mask of good cortical voxels. 
+ + Args: + func_file: Functional image file. + neighborhood_smoothing: Spatial smoothing kernal sigma (mm). + ci_limit: Parameter to control the good voxel threshold. Smaller + value relates to stricter threshold. + debug: If true, output intermediate files to out_dir. + + Returns: + A dict stores generated files. + + Raises: + ValueError: The func_file name is not BIDS-compliant. + FileNotFoundError: Required file is not found. + """ + + # Parse filename prefix + volume_prefix = re.search( + self.func_regex + "space-[^\W_]+(_res-[^\W_]+)?_?", Path(func_file).name + ) + if volume_prefix is None: + raise ValueError("The func_file name is not BIDS-compliant.") + volume_prefix = volume_prefix.group() + func_prefix = re.search(self.func_regex, Path(func_file).name).group() + + # Required file + ribbon_file = self.out_dir.joinpath(volume_prefix + "desc-ribbon_mask.nii.gz") + if not ribbon_file.is_file(): + raise FileNotFoundError( + f"Cortical ribbon mask {ribbon_file} not found. " + "Run function 'make_cortical_ribbon' first." + ) + + # Output + out_file = self.out_dir.joinpath(volume_prefix + "desc-goodvoxel_mask.nii.gz") + + # Find good voxel + res = {} + print(f"Creating good voxel mask: {func_file} ...", flush=True) + out_file = find_good_voxel( + func_file, + ribbon_file, + out_file, + neighborhood_smoothing=neighborhood_smoothing, + ci_limit=ci_limit, + debug=debug, + ) + res[f"{func_prefix}goodvoxel_mask"] = out_file + self.volume[f"{func_prefix}goodvoxel_mask"] = out_file + + return res + + def sample_func_to_surface( + self, + hemi: Literal["L", "R"], + func_file: PathLike, + timestep: Union[float, int], + dilate_distance: Optional[Union[float, int]] = 10, + timestep_format: str = ":.1f", + ) -> dict[str, Path]: + """Samples volumetric functional data to cortical surface. + + Args: + hemi: Brain hemisphere. + func_file: Functional image file. + timestep: The temporal interval of consecutive time points + in the func_file. 
Usually it's the repetition time of + the functional image. + dilate_distance: Dilate distance (mm) applies to surface + sampled functional data. + timestep_format: Float number format in the map name. + + Returns: + A dict stores generated files. + + Raises: + ValueError: The func_file name is not BIDS-compliant. + FileNotFoundError: Required file is not found. + """ + + # Parse filename prefix + volume_prefix = re.search( + self.func_regex + "space-[^\W_]+(_res-[^\W_]+)?_?", Path(func_file).name + ) + if volume_prefix is None: + raise ValueError("The func_file name is not BIDS-compliant.") + volume_prefix = volume_prefix.group() + func_prefix = re.search(self.func_regex, Path(func_file).name).group() + + # Required file + wm_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_wm.surf.gii" + ) + pial_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_pial.surf.gii" + ) + midthickness_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + surf_mask_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + vol_mask_file = self.out_dir.joinpath(volume_prefix + "desc-goodvoxel_mask.nii.gz") + for f in [wm_file, pial_file, midthickness_file, surf_mask_file]: + if not f.is_file(): + raise FileNotFoundError(f"Surface {f} not found. Run pipeline 'Anatomical' first.") + if not vol_mask_file.is_file(): + raise FileNotFoundError( + f"Good voxel volume mask {vol_mask_file} not found. " + "Run function 'make_good_voxel_mask' first." 
+ ) + + # Output + out_file = self.out_dir.joinpath( + func_prefix + f"hemi-{hemi}_space-fsnative_den-fsnative_desc-sm0pt0_bold.func.gii" + ) + + # Sample func data to surface + res = {} + print(f"Sampling func data to hemi-{hemi} surface: {func_file} ...", flush=True) + out_file = sample_volume_to_surface( + func_file, + wm_file, + pial_file, + midthickness_file, + out_file, + vol_mask_file=vol_mask_file, + surf_mask_file=surf_mask_file, + dilate_distance=dilate_distance, + ) + # Set GIFTI metadata + with tempfile.TemporaryDirectory() as tmp_dir: + name_file = make_func_map_name( + func_file, + timestep, + Path(tmp_dir).joinpath("mapname.txt"), + float_format=timestep_format, + ) + run_cmd( + f"wb_command -disable-provenance -set-map-names {out_file} -name-file {name_file}" + ) + res[f"{func_prefix}hemi-{hemi}_sm0pt0_bold"] = out_file + self.native[hemi][f"{func_prefix}hemi-{hemi}_sm0pt0_bold"] = out_file + + return res + + def smooth_native_func( + self, + hemi: Literal["L", "R"], + func_prefix: str, + smoothing_fwhm: Union[float, int], + ) -> dict[str, Path]: + """Smooths native surface functional data. + + Args: + hemi: Brain hemisphere. + func_prefix: Functional image filename prefix. For example, + sub-001_ses-01_task-XXX_run-1_. It should match the + surface sampled functional file. + smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). + + Returns: + A dict stores generated files. + + Raises: + ValueError: The func_prefix is not BIDS-compliant. + FileNotFoundError: Required file is not found. 
+ """ + + # Parse smoothing fwhm + fwhm = convert_fwhm_to_str(smoothing_fwhm) + # Parse func_prefix + func_prefix = f"{func_prefix}_" if not func_prefix.endswith("_") else func_prefix + func_prefix = re.search(self.func_regex, func_prefix) + if func_prefix is None: + raise ValueError("The func_prefix is not BIDS-compliant.") + func_prefix = func_prefix.group() + + # Required files + func_file = self.out_dir.joinpath( + func_prefix + f"hemi-{hemi}_space-fsnative_den-fsnative_desc-sm0pt0_bold.func.gii" + ) + midthickness_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + surf_mask_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + if not func_file.is_file(): + raise FileNotFoundError( + f"Native surface functional file {func_file} is not found. " + "Run function 'sample_func_to_surface' first." + ) + for f in [midthickness_file, surf_mask_file]: + if not f.is_file(): + raise FileNotFoundError( + f"Surface {f} is not found. Run pipeline 'Anatomical' first." + ) + + # Output + out_file = self.out_dir.joinpath(func_file.name.replace("sm0pt0", f"sm{fwhm}")) + + # Smoothing + print(f"Smoothing func data with FWHM={smoothing_fwhm}mm: {func_file} ...", flush=True) + res = {} + out_file = smooth_metric( + func_file, midthickness_file, out_file, smoothing_fwhm, roi_file=surf_mask_file + ) + res[f"{func_prefix}hemi-{hemi}_sm{fwhm}_bold"] = out_file + self.native[hemi][f"{func_prefix}hemi-{hemi}_sm{fwhm}_bold"] = out_file + + return res + + def make_diagnositic_metric( + self, + hemi: Literal["L", "R"], + func_file: PathLike, + dilate_distance: Optional[Union[float, int]] = 10, + ) -> dict[str, Path]: + """Makes diagnositic metric for surface sampled functional data. + + Args: + hemi: Brain hemisphere. + func_file: Functional image file. 
+ dilate_distance: Dilate distance (mm) applies to surface + sampled functional data. + + Returns: + A dict stores generated files. + + Raises: + ValueError: The func_file name is not BIDS-compliant. + FileNotFoundError: Required file is not found. + """ + + # Parse filename prefix + volume_prefix = re.search( + self.func_regex + "space-[^\W_]+(_res-[^\W_]+)?_?", Path(func_file).name + ) + if volume_prefix is None: + raise ValueError("The func_file name is not BIDS-compliant.") + volume_prefix = volume_prefix.group() + func_prefix = re.search(self.func_regex, Path(func_file).name).group() + + # Required file + wm_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_wm.surf.gii" + ) + pial_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_pial.surf.gii" + ) + midthickness_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + surf_mask_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + vol_mask_file = self.out_dir.joinpath(volume_prefix + "desc-goodvoxel_mask.nii.gz") + for f in [wm_file, pial_file, midthickness_file, surf_mask_file]: + if not f.is_file(): + raise FileNotFoundError(f"Surface {f} not found. Run pipeline 'Anatomical' first.") + if not vol_mask_file.is_file(): + raise FileNotFoundError( + f"Good voxel volume mask {vol_mask_file} not found. " + "Run function 'make_good_voxel_mask' first." 
+ ) + + print(f"Making diagnositic metric for hemi-{hemi} surface: {func_file} ...", flush=True) + res = {} + with tempfile.TemporaryDirectory() as tmp_dir: + + # Calculate temporal mean and coefficient of variation (cov) + tmean_file = Path(tmp_dir, "Mean.nii.gz") + tstd_file = Path(tmp_dir, "SD.nii.gz") + cov_file = Path(tmp_dir, "cov.nii.gz") + run_cmd(f"fslmaths {func_file} -Tmean {tmean_file} -odt float") + run_cmd(f"fslmaths {func_file} -Tstd {tstd_file} -odt float") + run_cmd(f"fslmaths {tstd_file} -div {tmean_file} {cov_file}") + + # Sample Tmean and Cov to surface (only good voxels) + for metric_id, metric_file in [("mean", tmean_file), ("cov", cov_file)]: + out_file = Path(self.out_dir).joinpath( + func_prefix + f"hemi-{hemi}_space-fsnative_den-fsnative_" + f"desc-goodvoxel_{metric_id}.shape.gii" + ) + out_file = sample_volume_to_surface( + metric_file, + wm_file, + pial_file, + midthickness_file, + out_file, + vol_mask_file=vol_mask_file, + surf_mask_file=surf_mask_file, + dilate_distance=dilate_distance, + ) + run_cmd( + f"wb_command -disable-provenance -set-map-names {out_file} " + f"-map 1 {func_prefix}hemi-{hemi}_desc-goodvoxel_{metric_id}" + ) + res[f"{func_prefix}hemi-{hemi}_desc-goodvoxel_{metric_id}"] = out_file + self.native[hemi][ + f"{func_prefix}hemi-{hemi}_desc-goodvoxel_{metric_id}" + ] = out_file + + # Sample Tmean and Cov to surface (all voxels) + for metric_id, metric_file in [("mean", tmean_file), ("cov", cov_file)]: + out_file = Path(self.out_dir).joinpath( + func_prefix + f"hemi-{hemi}_space-fsnative_den-fsnative_" + f"desc-allvoxel_{metric_id}.shape.gii" + ) + out_file = sample_volume_to_surface( + metric_file, + wm_file, + pial_file, + midthickness_file, + out_file, + vol_mask_file=None, + surf_mask_file=surf_mask_file, + dilate_distance=None, + ) + run_cmd( + f"wb_command -disable-provenance -set-map-names {out_file} " + f"-map 1 {func_prefix}hemi-{hemi}_desc-allvoxel_{metric_id}" + ) + 
res[f"{func_prefix}hemi-{hemi}_desc-allvoxel_{metric_id}"] = out_file + self.native[hemi][f"{func_prefix}hemi-{hemi}_desc-allvoxel_{metric_id}"] = out_file + + # Sample good voxel mask to surface + out_file = Path(self.out_dir).joinpath( + func_prefix + f"hemi-{hemi}_space-fsnative_den-fsnative_" + f"desc-goodvoxel_probseg.shape.gii" + ) + out_file = sample_volume_to_surface( + vol_mask_file, + wm_file, + pial_file, + midthickness_file, + out_file, + vol_mask_file=None, + surf_mask_file=surf_mask_file, + dilate_distance=None, + ) + run_cmd( + f"wb_command -disable-provenance -set-map-names {out_file} " + f"-map 1 {func_prefix}hemi-{hemi}_desc-goodvoxel_probseg" + ) + res[f"{func_prefix}hemi-{hemi}_desc-goodvoxel_probseg"] = out_file + self.native[hemi][f"{func_prefix}hemi-{hemi}_desc-goodvoxel_probseg"] = out_file + + return res + + +class ResampleSurfaceFunc(NativeSurfaceFunc): + """Native surface space functional to fsLR processing pipeline.""" + + def __init__( + self, + sub_id: Union[int, str], + surf_dir: PathLike, + template_dir: PathLike, + out_dir: PathLike, + anat_ses_id: Optional[str] = None, + anat_run_id: Optional[str] = None, + ) -> None: + """Initializes class. + + Args: + sub_id: SubjectID. + surf_dir: Surface files directory. Usually it is the output + directory of the Anatomical pipeline. + template_dir: Directory contains required template files. + out_dir: Directory to store output file. + anat_ses_id: Anatomical image SessionID. It is used for + selecting surfaces generated by Anatomical pipeline. + anat_run_id: Anatomical image RunID. It is used for + selecting surfaces generated by Anatomical pipeline. 
+ """ + + super().__init__( + sub_id, surf_dir, out_dir, anat_ses_id=anat_ses_id, anat_run_id=anat_run_id + ) + self.template_dir = Path(template_dir) + # Store important result files + self.fsLR = {"L": {}, "R": {}} + self.cifti = {} + + def run_resample_func_pipeline( + self, + func_file: PathLike, + timestep: Union[float, int], + mesh_den: str, + func_std_file: Optional[PathLike] = None, + smoothing_fwhm: Optional[Union[float, int, list[Union[float, int]]]] = None, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + debug: bool = False, + ) -> dict[str, Path]: + """Runs functional data resample (fsLR, MNI space) pipeline. + + Args: + func_file: Functional image file. Only used for getting + filename prefix to match sampled surface files. + timestep: The temporal interval of consecutive time points + in the func_file. Usually it's the repetition time of + the functional image. + mesh_den: Target fsLR space mesh density. + func_std_file: Functional image file. It should be in + MNI152NLin6Asym space with 2mm resolution. Optional. + smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). If + None, no spatial smoothing applies to the functional + data. It could be a list of numbers indicate multiple + smoothing levels. The unsmooth data is always generated + even 0 is not in the smoothing_fwhm list. + registration_method: Surface-based registration method. + debug: If true, output intermediate files. + + Returns: + A dict stores generated files. + + Raises: + ValueError: fun_std_file/func_prefix is not BIDS-compliant. + ValueError: func_file and func_std_file does not match. + FileNotFoundError: Required file is not found. 
+ """ + + # Parse functional file name prefix + func_prefix = re.search(self.func_regex, Path(func_file).name) + if func_prefix is None: + raise ValueError("The func_file name is not BIDS-compliant.") + func_prefix = func_prefix.group() + # Parse smoothing fwhm + # Note: if 0 presents in the list, remove it from comformed list + smoothing_fwhm = parse_smoothing_fwhm(smoothing_fwhm, remove_zero=True) + # Parse func_std_file + if func_std_file is not None: + if not Path(func_std_file).name.startswith(func_prefix): + raise ValueError("Input func_file and func_std_file does not match.") + with_volume = True + else: + with_volume = False + + res = {} + # Resample functional data from native to fsLR space + print("\n###Resample functional data to fsLR space###\n", flush=True) + for hemi in ["L", "R"]: + out = self.resample_func_to_fsLR( + hemi, + func_prefix, + mesh_den, + registration_method=registration_method, + ) + res.update(out) + # Resample diagnositic metric data from native to fsLR space + print("\n###Resample diagnositic metric data to fsLR space###\n", flush=True) + for hemi in ["L", "R"]: + out = self.resample_diagnositic_metric_to_fsLR( + hemi, + func_prefix, + mesh_den, + registration_method=registration_method, + ) + res.update(out) + # Smoothing functional data in fsLR space + if smoothing_fwhm is not None: + print("\n###Smooth functional data in fsLR surface###\n", flush=True) + for fwhm in smoothing_fwhm: + for hemi in ["L", "R"]: + out = self.smooth_fsLR_func( + hemi, + func_prefix, + mesh_den, + fwhm, + registration_method=registration_method, + ) + res.update(out) + # Extract functional data in MNI152NLin6Asym space ROIs + if with_volume: + print("\n###Extract functional data in subcortical ROIs###\n", flush=True) + out = self.extract_func_subcortical( + func_std_file, + func_prefix=func_prefix, + smoothing_fwhm=None, + debug=debug, + ) + res.update(out) + # Smoothing if requested + if smoothing_fwhm is not None: + for fwhm in smoothing_fwhm: + out = 
self.extract_func_subcortical( + func_std_file, + func_prefix=func_prefix, + smoothing_fwhm=fwhm, + debug=debug, + ) + res.update(out) + # Make functional dtseries CIFTI file (fsLR, MNI space) + print("\n###Make functional dtseries CIFTI file###\n", flush=True) + if func_std_file is None: + print( + "Argument func_std_file is None. " + "Output CIFTI files does not include subcortical volume data.\n", + flush=True, + ) + out = self.make_func_fsLR_cifti( + func_prefix, timestep, mesh_den, smoothing_fwhm=None, include_volume=with_volume + ) + res.update(out) + # Smoothed file + if smoothing_fwhm is not None: + for fwhm in smoothing_fwhm: + out = self.make_func_fsLR_cifti( + func_prefix, + timestep, + mesh_den, + smoothing_fwhm=fwhm, + include_volume=with_volume, + ) + res.update(out) + # Make diagnositic dscalar CIFTI file (fsLR) + print("\n###Make diagnositic dscalar CIFTI file###\n", flush=True) + out = self.make_diagnositic_metric_fsLR_cifti(func_prefix, mesh_den, smoothing_fwhm=None) + res.update(out) + # Smoothed file + if smoothing_fwhm is not None: + for fwhm in smoothing_fwhm: + out = self.make_diagnositic_metric_fsLR_cifti( + func_prefix, mesh_den, smoothing_fwhm=fwhm + ) + res.update(out) + + return res + + def resample_func_to_fsLR( + self, + hemi: Literal["L", "R"], + func_prefix: str, + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> dict[str, Path]: + """Resamples native surface functional data to fsLR space. + + Args: + hemi: Brain hemisphere. + func_prefix: Functional image filename prefix. For example, + sub-001_ses-01_task-XXX_run-1_. It should match the + surface sampled functional file. + mesh_den: Target fsLR space mesh density. + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + ValueError: The func_prefix is not BIDS-compliant. + FileNotFoundError: Required file is not found. 
+ """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse func_prefix + func_prefix = f"{func_prefix}_" if not func_prefix.endswith("_") else func_prefix + func_prefix = re.search(self.func_regex, func_prefix) + if func_prefix is None: + raise ValueError("The func_prefix is not BIDS-compliant.") + func_prefix = func_prefix.group() + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den, valid_list=["164k", "59k", "32k"]) + # Parse registration method + method = parse_registration_method(registration_method) + + # Required files + func_file = self.out_dir.joinpath( + func_prefix + f"hemi-{hemi}_space-fsnative_den-fsnative_desc-sm0pt0_bold.func.gii" + ) + warpped_sphere_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_desc-{method}_sphere.surf.gii" + ) + template_sphere_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_sphere.surf.gii" + ) + midthickness_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + warpped_midthickness_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_midthickness.surf.gii" + ) + roi_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + template_roi_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_desc-nomedialwall_probseg.shape.gii" + ) + if not func_file.is_file(): + raise FileNotFoundError( + f"Native surface functional file {func_file} is not found." + "Run pipeline 'run_native_space_func_pipeline' first." + ) + for f in [ + warpped_sphere_file, + template_sphere_file, + midthickness_file, + warpped_midthickness_file, + roi_file, + template_roi_file, + ]: + if not f.is_file(): + raise FileNotFoundError( + f"Surface {f} is not found. Run pipeline 'Anatomical' first." 
+ ) + + # Output + out_file = self.out_dir.joinpath( + func_prefix + f"hemi-{hemi}_space-fsLR_den-{mesh_den}_desc-sm0pt0_bold.func.gii" + ) + + # Resample func data + print(f"Resampling func data to fsLR {mesh_den} space: {func_file} ...", flush=True) + res = {} + out_file = resample_metric( + func_file, + warpped_sphere_file, + template_sphere_file, + out_file, + current_area_surf_file=midthickness_file, + target_area_surf_file=warpped_midthickness_file, + roi_file=roi_file, + resample_method="ADAP_BARY_AREA", + ) + # Apply (no)medialwall in target space + run_cmd( + f"wb_command -disable-provenance -metric-mask {out_file} {template_roi_file} {out_file}" + ) + res[f"{func_prefix}hemi-{hemi}_fsLR_{mesh_den}_sm0pt0_bold"] = out_file + self.fsLR[hemi][f"{func_prefix}hemi-{hemi}_fsLR_{mesh_den}_sm0pt0_bold"] = out_file + + return res + + def resample_diagnositic_metric_to_fsLR( + self, + hemi: Literal["L", "R"], + func_prefix: str, + mesh_den: str, + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> dict[str, Path]: + """Resamples native surface diagnositic metric to fsLR space. + + Args: + hemi: Brain hemisphere. + func_prefix: Functional image filename prefix. For example, + sub-001_ses-01_task-XXX_run-1_. It should match the + surface sampled functional file. + mesh_den: Target fsLR space mesh density. + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. + + Raises: + ValueError: The func_prefix is not BIDS-compliant. + FileNotFoundError: Required file is not found. 
+ """ + + # Parse hemisphere + hemi, _ = parse_hemi(hemi) + # Parse func_prefix + func_prefix = f"{func_prefix}_" if not func_prefix.endswith("_") else func_prefix + func_prefix = re.search(self.func_regex, func_prefix) + if func_prefix is None: + raise ValueError("The func_prefix is not BIDS-compliant.") + func_prefix = func_prefix.group() + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den, valid_list=["164k", "59k", "32k"]) + # Parse registration method + method = parse_registration_method(registration_method) + + # Common required files + warpped_sphere_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-fsnative_desc-{method}_sphere.surf.gii" + ) + template_sphere_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_sphere.surf.gii" + ) + midthickness_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_midthickness.surf.gii" + ) + warpped_midthickness_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_midthickness.surf.gii" + ) + roi_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsnative_den-fsnative_" + "desc-nomedialwall_probseg.shape.gii" + ) + template_roi_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_desc-nomedialwall_probseg.shape.gii" + ) + for f in [ + warpped_sphere_file, + template_sphere_file, + midthickness_file, + warpped_midthickness_file, + roi_file, + template_roi_file, + ]: + if not f.is_file(): + raise FileNotFoundError( + f"Surface {f} is not found. Run pipeline 'Anatomical' first." 
+ ) + + res = {} + for suffix in [ + "desc-goodvoxel_mean", + "desc-goodvoxel_cov", + "desc-allvoxel_mean", + "desc-allvoxel_cov", + "desc-goodvoxel_probseg", + ]: + # Required file + metric_file = self.out_dir.joinpath( + func_prefix + f"hemi-{hemi}_space-fsnative_den-fsnative_{suffix}.shape.gii" + ) + if not metric_file.is_file(): + raise FileNotFoundError( + f"Native surface diagnositic metric file {metric_file} is not found." + "Run function 'make_diagnositic_metric' first." + ) + + # Output + out_file = self.out_dir.joinpath( + func_prefix + f"hemi-{hemi}_space-fsLR_den-{mesh_den}_{suffix}.shape.gii" + ) + # Resample diagnositic data + print( + f"Resampling diagnositic metric data to fsLR {mesh_den} space: {metric_file} ...", + flush=True, + ) + out_file = resample_metric( + metric_file, + warpped_sphere_file, + template_sphere_file, + out_file, + current_area_surf_file=midthickness_file, + target_area_surf_file=warpped_midthickness_file, + roi_file=roi_file, + resample_method="ADAP_BARY_AREA", + ) + # Apply (no)medialwall in target space + run_cmd( + f"wb_command -disable-provenance -metric-mask {out_file} " + f"{template_roi_file} {out_file}" + ) + res[f"{func_prefix}hemi-{hemi}_fsLR_{mesh_den}_{suffix}"] = out_file + self.fsLR[hemi][f"{func_prefix}hemi-{hemi}_fsLR_{mesh_den}_{suffix}"] = out_file + + return res + + def smooth_fsLR_func( + self, + hemi: Literal["L", "R"], + func_prefix: str, + mesh_den: str, + smoothing_fwhm: Union[float, int], + registration_method: Literal["FS", "MSMSulc"] = "MSMSulc", + ) -> dict[str, Path]: + """Smooths fsLR space functional data. + + Args: + hemi: Brain hemisphere. + func_prefix: Functional image filename prefix. For example, + sub-001_ses-01_task-XXX_run-1_. It should match the + surface sampled functional file. + mesh_den: Target fsLR space mesh density. + smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). + registration_method: Surface-based registration method. + + Returns: + A dict stores generated files. 
+ + Raises: + ValueError: The func_prefix is not BIDS-compliant. + FileNotFoundError: Required file is not found. + """ + + # Parse func_prefix + func_prefix = f"{func_prefix}_" if not func_prefix.endswith("_") else func_prefix + func_prefix = re.search(self.func_regex, func_prefix) + if func_prefix is None: + raise ValueError("The func_prefix is not BIDS-compliant.") + func_prefix = func_prefix.group() + # Parse smoothing fwhm + fwhm = convert_fwhm_to_str(smoothing_fwhm) + # Parse registration method + method = parse_registration_method(registration_method) + + # Required files + func_file = self.out_dir.joinpath( + func_prefix + f"hemi-{hemi}_space-fsLR_den-{mesh_den}_desc-sm0pt0_bold.func.gii" + ) + midthickness_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_hemi-{hemi}_space-fsLR_den-{mesh_den}_" + f"desc-{method}_midthickness.surf.gii" + ) + surf_mask_file = self.template_dir.joinpath( + f"fsLR_hemi-{hemi}_space-fsLR_den-{mesh_den}_desc-nomedialwall_probseg.shape.gii" + ) + if not func_file.is_file(): + raise FileNotFoundError( + f"fsLR space functional file {func_file} is not found." + "Run function 'resample_func_to_fsLR' first." + ) + for f in [midthickness_file, surf_mask_file]: + if not f.is_file(): + raise FileNotFoundError( + f"Surface {f} is not found. Run pipeline 'Anatomical' first." 
+ ) + + # Output + out_file = self.out_dir.joinpath(func_file.name.replace("sm0pt0", f"sm{fwhm}")) + + # Smoothing + print(f"Smoothing func data with FWHM={smoothing_fwhm}mm: {func_file} ...", flush=True) + res = {} + out_file = smooth_metric( + func_file, midthickness_file, out_file, smoothing_fwhm, roi_file=surf_mask_file + ) + res[f"{func_prefix}hemi-{hemi}_fsLR_{mesh_den}_sm{fwhm}_bold"] = out_file + self.fsLR[hemi][f"{func_prefix}hemi-{hemi}_fsLR_{mesh_den}_sm{fwhm}_bold"] = out_file + + return res + + def extract_func_subcortical( + self, + func_std_file: PathLike, + func_prefix: Optional[str] = None, + smoothing_fwhm: Optional[Union[float, int]] = None, + debug: bool = False, + ) -> dict[str, Path]: + """Extracts functional data in standard subcortical ROIs. + + Args: + func_std_file: Functional image file. It should be in + MNI152NLin6Asym space with 2mm resolution. + func_prefix: Functional image filename prefix. For example, + sub-001_ses-01_task-XXX_run-1_. It should match the + surface sampled functional file. + smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). If + None, no spatial smoothing applies to the functional + data. Note, this operation is constrained within each + subcortical region. + debug: If true, output intermediate files. + + Returns: + A dict stores generated files. + + Raises: + ValueError: The fun_std_file/func_prefix is not BIDS-compliant. + FileNotFoundError: Required file is not found. 
+ """ + + # Parse func_std_file + func_std_prefix = re.search(self.func_regex, Path(func_std_file).name) + if func_std_prefix is None: + raise ValueError("The func_std_file name is not BIDS-compliant.") + # Parse func_prefix + if func_prefix is None: + func_prefix = re.search(self.func_regex, Path(func_std_file).name) + if func_prefix is None: + raise ValueError("The func_std_file name is not BIDS-compliant.") + else: + func_prefix = f"{func_prefix}_" if not func_prefix.endswith("_") else func_prefix + func_prefix = re.search(self.func_regex, func_prefix) + if func_prefix is None: + raise ValueError("The func_prefix is not BIDS-compliant.") + func_prefix = func_prefix.group() + # Parse smoothing fwhm + if smoothing_fwhm is not None: + fwhm = smoothing_fwhm + fwhm_str = convert_fwhm_to_str(smoothing_fwhm) + else: + fwhm = 0 + fwhm_str = "0pt0" + + # Required files + seg_file = self.surf_dir.joinpath( + f"{self.anat_prefix}_space-MNI152NLin6Asym_res-2_desc-Subcortical_dseg.nii.gz" + ) + template_seg_file = self.template_dir.joinpath( + "MNI_space-MNI152NLin6Asym_res-2_desc-Subcortical_dseg.nii.gz" + ) + for f in [seg_file, template_seg_file]: + if not f.is_file(): + raise FileNotFoundError(f"ROI {f} is not found. 
Run pipeline 'Anatomical' first.") + + # Output + out_file = self.out_dir.joinpath( + func_prefix + f"space-MNI152NLin6Asym_res-02_desc-Subcortical-sm{fwhm_str}_bold.nii.gz" + ) + + # Extract func data + print( + f"Extracting func data in subcortical ROIs with FWHM={fwhm}mm: {func_std_file} ...", + flush=True, + ) + res = {} + out_file = extract_func_subcortical( + func_std_file, + seg_file, + template_seg_file, + out_file, + smoothing_fwhm=smoothing_fwhm, + debug=debug, + ) + res[f"{func_prefix}MNI152NLin6Asym_res-02_Subcortical_sm{fwhm_str}"] = out_file + self.volume[f"{func_prefix}MNI152NLin6Asym_res-02_Subcortical_sm{fwhm_str}"] = out_file + + return res + + def make_func_fsLR_cifti( + self, + func_prefix: str, + timestep: Union[float, int], + mesh_den: str, + smoothing_fwhm: Optional[Union[float, int]] = None, + include_volume: bool = True, + ) -> dict[str, Path]: + """Makes functional dtseries CIFTI file (fsLR, MNI space). + + Args: + func_prefix: Functional image filename prefix. For example, + sub-001_ses-01_task-XXX_run-1_. It should match the + surface sampled functional file. + timestep: The temporal interval of consecutive time points + in the func_file. Usually it's the repetition time of + the functional image. + mesh_den: Target fsLR space mesh density. + smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). + include_volume: If true, include functional data in volume + standard subcortical ROIs. + + Returns: + A dict stores generated files. + + Raises: + ValueError: The func_prefix name is not BIDS-compliant. + FileNotFoundError: Required file is not found. 
+ """ + + # Parse func_prefix + func_prefix = f"{func_prefix}_" if not func_prefix.endswith("_") else func_prefix + func_prefix = re.search(self.func_regex, func_prefix) + if func_prefix is None: + raise ValueError("The func_prefix is not BIDS-compliant.") + func_prefix = func_prefix.group() + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den, valid_list=["164k", "59k", "32k"]) + # Parse smoothing fwhm + if smoothing_fwhm is None: + smoothing_fwhm = 0 + fwhm_str = convert_fwhm_to_str(smoothing_fwhm) + + # Required file + left_surf_file = self.out_dir.joinpath( + func_prefix + f"hemi-L_space-fsLR_den-{mesh_den}_desc-sm{fwhm_str}_bold.func.gii" + ) + right_surf_file = self.out_dir.joinpath( + func_prefix + f"hemi-R_space-fsLR_den-{mesh_den}_desc-sm{fwhm_str}_bold.func.gii" + ) + left_roi_file = self.template_dir.joinpath( + f"fsLR_hemi-L_space-fsLR_den-{mesh_den}_desc-nomedialwall_probseg.shape.gii" + ) + right_roi_file = self.template_dir.joinpath( + f"fsLR_hemi-R_space-fsLR_den-{mesh_den}_desc-nomedialwall_probseg.shape.gii" + ) + for f in [left_surf_file, right_surf_file]: + if not f.is_file(): + raise FileNotFoundError( + f"Surface functional file {f} is not found. " + "Run function 'resample_func_to_fsLR' first." + ) + for f in [left_roi_file, right_roi_file]: + if not f.is_file(): + raise FileNotFoundError( + f"ROI file {f} is not found. Run pipeline 'Anatomical' first." + ) + if include_volume: + volume_file = self.out_dir.joinpath( + func_prefix + + f"space-MNI152NLin6Asym_res-02_desc-Subcortical-sm{fwhm_str}_bold.nii.gz" + ) + template_seg_file = self.template_dir.joinpath( + "MNI_space-MNI152NLin6Asym_res-2_desc-Subcortical_dseg.nii.gz" + ) + if not volume_file.is_file(): + raise FileNotFoundError( + f"Functional file {volume_file} is not found. " + "Run function 'extract_func_subcortical' first." + ) + if not template_seg_file.is_file(): + raise FileNotFoundError( + f"ROI file {volume_file} is not found. Run pipeline 'Anatomical' first." 
+ ) + else: + volume_file, template_seg_file = None, None + + # Output + out_file = self.out_dir.joinpath( + func_prefix + f"space-fsLR_den-{mesh_den}_desc-sm{fwhm_str}_bold.dtseries.nii" + ) + + # Make func dtseries + res = {} + out_file = make_dense_timeseries( + out_file, + timestep, + left_surf_file=left_surf_file, + right_surf_file=right_surf_file, + left_roi_file=left_roi_file, + right_roi_file=right_roi_file, + volume_file=volume_file, + volume_label_file=template_seg_file, + ) + res[f"{func_prefix}fsLR_32k_sm{fwhm_str}"] = out_file + self.cifti[f"{func_prefix}fsLR_32k_sm{fwhm_str}"] = out_file + + return res + + def make_diagnositic_metric_fsLR_cifti( + self, + func_prefix: str, + mesh_den: str, + smoothing_fwhm: Optional[Union[float, int]] = None, + ) -> dict[str, Path]: + """Makes diagnositic metric dscalar CIFTI file (fsLR). + + Args: + func_prefix: Functional image filename prefix. For example, + sub-001_ses-01_task-XXX_run-1_. It should match the + surface sampled functional file. + mesh_den: Target fsLR space mesh density. + smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). + + Returns: + A dict stores generated files. + + Raises: + ValueError: The func_prefix is not BIDS-compliant. + FileNotFoundError: Required file is not found. 
+ """ + + # Parse func_prefix + func_prefix = f"{func_prefix}_" if not func_prefix.endswith("_") else func_prefix + func_prefix = re.search(self.func_regex, func_prefix) + if func_prefix is None: + raise ValueError("The func_prefix is not BIDS-compliant.") + func_prefix = func_prefix.group() + # Parse target mesh density + mesh_den = parse_mesh_density(mesh_den, valid_list=["164k", "59k", "32k"]) + # Parse smoothing fwhm + if smoothing_fwhm is None: + smoothing_fwhm = 0 + else: + print( + "Only make temporal mean (good voxel) CIFTI file " + "when smoothing kernal size is not 0.", + flush=True, + ) + fwhm_str = convert_fwhm_to_str(smoothing_fwhm) + + # Common required file + left_roi_file = self.template_dir.joinpath( + f"fsLR_hemi-L_space-fsLR_den-{mesh_den}_desc-nomedialwall_probseg.shape.gii" + ) + right_roi_file = self.template_dir.joinpath( + f"fsLR_hemi-R_space-fsLR_den-{mesh_den}_desc-nomedialwall_probseg.shape.gii" + ) + for f in [left_roi_file, right_roi_file]: + if not f.is_file(): + raise FileNotFoundError( + f"ROI file {f} is not found. Run pipeline 'Anatomical' first." + ) + + res = {} + + # Tmean (good voxel) + func_file = self.out_dir.joinpath( + func_prefix + f"space-fsLR_den-{mesh_den}_desc-sm{fwhm_str}_bold.dtseries.nii" + ) + if not func_file.is_file(): + raise FileNotFoundError( + f"Functional dtseries file {f} is not found. " + "Run function 'make_func_fsLR_cifti' first." 
+ ) + # Output + out_file = self.out_dir.joinpath( + func_prefix + f"space-fsLR_den-{mesh_den}_desc-goodvoxel-sm{fwhm_str}_mean.dscalar.nii" + ) + # Make dscalar file + print(f"Creating dense scalar file: {out_file} ...", flush=True) + run_cmd(f"wb_command -disable-provenance -cifti-reduce {func_file} MEAN {out_file}") + run_cmd( + f"wb_command -disable-provenance -set-map-names {out_file} -map 1 " + f"{func_prefix}space-fsLR_den-{mesh_den}_desc-goodvoxel-sm{fwhm_str}_mean" + ) + res[f"{func_prefix}fsLR_32k_desc-sm{fwhm_str}-goodvoxel_mean"] = out_file + self.cifti[f"{func_prefix}fsLR_32k_desc-sm{fwhm_str}-goodvoxel_mean"] = out_file + + # Make other files when smoothing is 0 + if smoothing_fwhm == 0: + for suffix in [ + "desc-goodvoxel_cov", + "desc-allvoxel_mean", + "desc-allvoxel_cov", + "desc-goodvoxel_probseg", + ]: + # Required file + left_surf_file = self.out_dir.joinpath( + func_prefix + f"hemi-L_space-fsLR_den-{mesh_den}_{suffix}.shape.gii" + ) + right_surf_file = self.out_dir.joinpath( + func_prefix + f"hemi-L_space-fsLR_den-{mesh_den}_{suffix}.shape.gii" + ) + for f in [left_surf_file, right_surf_file]: + if not f.is_file(): + raise FileNotFoundError( + f"Mertic file {f} is not found. " + "Run function 'resample_diagnositic_metric_to_fsLR' first." 
+ ) + # Output + out_file = self.out_dir.joinpath( + func_prefix + f"space-fsLR_den-{mesh_den}_{suffix}.dscalar.nii" + ) + # Make dscalar file + out_file = make_dense_scalar( + out_file, + left_surf_file=left_surf_file, + right_surf_file=right_surf_file, + left_roi_file=left_roi_file, + right_roi_file=right_roi_file, + cifti_map_name=f"{func_prefix}space-fsLR_den-{mesh_den}_{suffix}", + ) + self.cifti[f"{func_prefix}fsLR_32k_{suffix}"] = out_file + + return res + + +class FunctionalSurfaceBased(ResampleSurfaceFunc): + def __init__( + self, + sub_id: Union[int, str], + surf_dir: PathLike, + template_dir: PathLike, + out_dir: PathLike, + anat_ses_id: Optional[str] = None, + anat_run_id: Optional[str] = None, + ): + """Initializes class. + + Args: + sub_id: SubjectID. + surf_dir: Surface files directory. Usually it is the output + directory of the Anatomical pipeline. + template_dir: Directory contains required template files. + out_dir: Directory to store output file. + anat_ses_id: Anatomical image SessionID. It is used for + selecting surfaces generated by Anatomical pipeline. + anat_run_id: Anatomical image RunID. It is used for + selecting surfaces generated by Anatomical pipeline. 
+ """ + + super().__init__( + sub_id, + surf_dir, + template_dir, + out_dir, + anat_ses_id=anat_ses_id, + anat_run_id=anat_run_id, + ) + + def run_functional_pipeline( + self, + func_file: PathLike, + timestep: Union[float, int], + ref_file: PathLike, + mesh_den: str, + func_std_file: Optional[PathLike] = None, + smoothing_fwhm: Optional[Union[float, int, list[Union[float, int]]]] = None, + registration_method: str = "MSMSulc", + keep_gifti_native: bool = False, + keep_gifti_fsLR: bool = False, + timestep_format: str = ":.1f", + grey_ribbon_value: int = 1, + neighborhood_smoothing: Union[float, int] = 5, + ci_limit: Union[float, int] = 0.5, + dilate_distance: Optional[Union[float, int]] = 10, + debug: bool = False, + ) -> dict[str, Path]: + """Runs full surface-based functional data pipeline. + + Args: + func_file: Functional image file. + timestep: The temporal interval of consecutive time points + in the func_file. Usually it's the repetition time of + the functional image. + ref_file: Volume image file used as reference of generated + cortical ribbon file. + mesh_den: Target fsLR space mesh density. + func_std_file: Functional image file. It should be in + MNI152NLin6Asym space with 2mm resolution. Optional. + smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). If + None, no spatial smoothing applies to the functional + data. It could be a list of numbers indicate multiple + smoothing levels. The unsmooth data is always generated + even 0 is not in the smoothing_fwhm list. + registration_method: Surface-based registration method. + keep_gifti_native: If ture, keep native space GIFTI files. + keep_gifti_fsLR: If true, keep fsLR space GIFTI files. + timestep_format: Float number format in the map name. + grey_ribbon_value: Index value of the ribbon voxels. See + function 'make_cortical_ribbon'. + neighborhood_smoothing: Spatial smoothing kernal sigma + (FWHM, mm) for finding good voxels. See function + 'make_good_voxel_mask'. 
+ ci_limit: Parameter to control the good voxel threshold. + Smaller value relates to stricter threshold. See + function 'make_good_voxel_mask'. + dilate_distance: Dilate distance (mm) applies to surface + sampled functional data. + debug: If true, output intermediate files. + + Returns: + A dict stores generated files. + + Raises: + ValueError: fun_std_file/func_prefix is not BIDS-compliant. + ValueError: func_file and func_std_file does not match. + FileNotFoundError: Required file is not found. + + """ + + print(f"Starting surface-based functional pipeline: {func_file.name}!", flush=True) + + res = {} + # Native space functional data pipeline + smoothing_fwhm_native = smoothing_fwhm if keep_gifti_native else None + out = self.run_native_space_func_pipeline( + func_file, + timestep, + ref_file, + smoothing_fwhm=smoothing_fwhm_native, + timestep_format=timestep_format, + grey_ribbon_value=grey_ribbon_value, + neighborhood_smoothing=neighborhood_smoothing, + ci_limit=ci_limit, + dilate_distance=dilate_distance, + debug=debug, + ) + res.update(out) + # Resample pipeline + out = self.run_resample_func_pipeline( + func_file, + timestep, + mesh_den, + func_std_file=func_std_file, + smoothing_fwhm=smoothing_fwhm, + registration_method=registration_method, + debug=debug, + ) + res.update(out) + # Cleanup + print("\n###Cleanup unnecessary file###\n", flush=True) + self.remove_unnecessary_file( + keep_gifti_native=keep_gifti_native, keep_gifti_fsLR=keep_gifti_fsLR + ) + + print(f"\nSurface-based functional pipeline finished!\n\n\n", flush=True) + + return res + + def remove_unnecessary_file( + self, keep_gifti_native: bool = False, keep_gifti_fsLR: bool = False + ) -> None: + """Removes intermediate files. + + Args: + keep_gifti_native: If ture, keep native space GIFTI files. + keep_gifti_fsLR: If true, keep fsLR space GIFTI files. 
+ """ + + # fsnative + if not keep_gifti_native: + # GIFTI surface + for _, f in {**self.native["L"], **self.native["R"]}.items(): + print(f"Cleaning {f.name} ...", flush=True) + f.unlink() + # Ribbon and good voxel mask + for _, f in self.volume.items(): + for s in ["ribbon_mask", "goodvoxel_mask"]: + if s in f.name: + print(f"Cleaning {f.name} ...", flush=True) + f.unlink() + # fsLR file + if not keep_gifti_fsLR: + # GIFTI surface + for _, f in {**self.fsLR["L"], **self.fsLR["R"]}.items(): + print(f"Cleaning {f.name} ...", flush=True) + f.unlink() + # Subcortical volume + for _, f in self.volume.items(): + for s in ["Subcortical"]: + if s in f.name: + print(f"Cleaning {f.name} ...", flush=True) + f.unlink() diff --git a/pantheon/project.py b/pantheon/project.py new file mode 100644 index 0000000..2da56c1 --- /dev/null +++ b/pantheon/project.py @@ -0,0 +1,1762 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Project main workhorse.""" + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: + +from __future__ import annotations +from typing import Optional, Union, Literal, Any +from pathlib import Path +import warnings +import yaml +import re +import numpy as np +import pandas as pd +import nibabel as nib + +from .masking.roi import parse_roi_id, make_roi_from_spec, unmask +from .image.cifti import read_dscalar, read_dtseries, read_dscalar_roi, read_dtseries_roi +from .utils.validation import ( + check_file_list, + conform_sub_id, + conform_run_id, + conform_task_id, + parse_hemi, +) +from .utils.typing import PathLike, StatMapView, SurfaceView + + +class Layout: + """Project file layout class. + + This class provides a series of functions to get most useful files + under root project directory. + + Attributes: + bids_subject_list: All SubjectID in the BIDS dataset. + subject_list: SubjectID after data exclusion. + task_list: All TaskID in the BIDS dataset. + task_info: A dict contains information of the task. 
Usually it + contains information of data exclusion. + func_regex: A regular expression to match a task related file. + """ + + def __init__(self, project_root_dir: PathLike) -> None: + """Initializes class. + + Args: + project_root_dir: Root directory of the project. + All subdirectories are relative to this directory. + """ + + ############# + # Directories + ############# + self.base_dir = Path(project_root_dir) + self.code_dir = self.base_dir.joinpath("code", "pantheon") + self.bids_dir = self.base_dir.joinpath("data", "bidsdata") + self.deriv_dir = self.base_dir.joinpath("data", "derivatives") + self.srcdata_dir = self.base_dir.joinpath("data", "sourcedata") + self.extdata_dir = self.base_dir.joinpath("data", "external") + self.metadata_dir = self.base_dir.joinpath("data", "metadata") + self.tmp_dir = self.base_dir.joinpath("temporary_files") + self.fs_dir = self.deriv_dir.joinpath("freesurfer") + self.fmriprep_dir = self.deriv_dir.joinpath("fmriprep") + self.preproc_dir = self.deriv_dir.joinpath("preprocessed") + self.roi_dir = self.deriv_dir.joinpath("roi") + self.singletrial_dir = self.deriv_dir.joinpath("singletrial_response") + + ############################# + # Project specific parameters + ############################# + # Subject list from BIDS dataset + self.bids_subject_file = self.bids_dir.joinpath("participants.tsv") + self.bids_subject_list = list( + pd.read_csv(self.bids_subject_file, sep="\t")["participant_id"] + ) + # Data validation infomation + # task name, default run list + # subject exclusion, run exclusion + self.validataion_file = self.metadata_dir.joinpath("data_validation.yaml") + if self.validataion_file.is_file(): + with open(self.validataion_file, "r") as f: + self.dv = yaml.load(f, Loader=yaml.CLoader) + # Subject list (after exclusion) + self.subject_list = [ + i for i in self.bids_subject_list if i not in self.dv["exclude_subject"] + ] + # Task information + self.task_info = self.dv["task"] + self.task_list = 
list(self.dv["task"].keys()) + else: + warnings.warn( + f"\nData validation file {self.validataion_file} not found.\n" + "Use default bids_subject_list.\n" + "Task infomation not available." + ) + # Regular expression to match task files + self.func_regex = ( + r"(?Psub-[^\W_]+)_(?Pses-[^\W_]+)?_?" + + r"(?Ptask-[^\W_]+)_(?Prun-\d+)?_?" + ) + + ################ + # Raw data files + ################ + + # BIDS file + def get_anat_file( + self, + sub_id: Union[int, str], + suffix: str = "T1w", + ses_id: Optional[str] = None, + modifier: Optional[str] = None, + ) -> list[Path]: + """Gets anat file in BIDS directory. + + Args: + sub_id: SubjectID. + suffix: BIDS anatomical file suffix (e.g., T1w). + ses_id: SessionID. Optional. + modifier: Any possible filename modifier after session and + before suffix part. + + Returns: + A list of anatomical file. + """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + ses_id = f"_ses-{ses_id}" if ses_id else "" + modifier = f"{modifier}*" if modifier else "" + file_list = sorted( + self.bids_dir.joinpath(sub_id).glob( + f"**/anat/{sub_id}{ses_id}*{modifier}_{suffix}.nii.gz" + ) + ) + _ = check_file_list(file_list, n=1) + return file_list + + def get_func_file( + self, + sub_id: Union[int, str], + task_id: str, + run_id: Optional[Union[str, int, list[Union[str, int]]]] = None, + ses_id: Optional[str] = None, + suffix: str = "bold", + modifier: Optional[str] = None, + exclude: bool = False, + ) -> list[Path]: + """Gets func file in BIDS directory. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + suffix: Filename suffix after modifier and before extexsion. + modifier: Any possible filename modifier after session and + before suffix part. + exclude: If true, exclude runs based on data validation + metadata. + + Returns: + A list of functional file. 
+ """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + task_id = conform_task_id(task_id, with_prefix=True) + run_list = self.get_run_id(sub_id, task_id, run_id, exclude=exclude) + if not ((len(run_list) == 1) & (run_list[0] == "")): + run_list = [f"_{i}" for i in run_list] + ses_id = f"_ses-{ses_id}" if ses_id else "" + modifier = f"{modifier}*" if modifier else "" + file_list = [] + for run_id in run_list: + file_list += sorted( + self.bids_dir.joinpath(sub_id).glob( + f"**/{sub_id}{ses_id}_{task_id}{run_id}*{modifier}_{suffix}.nii.gz" + ) + ) + check_file_list(file_list, n=len(run_list)) + return file_list + + def get_beh_file( + self, + sub_id: Union[int, str], + task_id: str, + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + suffix: str = "events", + modifier: Optional[str] = None, + exclude: bool = False, + ) -> list[Path]: + """Gets behavior file in BIDS directory. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + suffix: Filename suffix after modifier and before extexsion. + modifier: Any possible filename modifier after session and + before suffix part. + exclude: If true, exclude runs based on data validation + metadata. + + Returns: + A list of behavior file. 
+ """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + task_id = conform_task_id(task_id, with_prefix=True) + run_list = self.get_run_id(sub_id, task_id, run_id, exclude=exclude) + if not ((len(run_list) == 1) & (run_list[0] == "")): + run_list = [f"_{i}" for i in run_list] + ses_id = f"_ses-{ses_id}" if ses_id else "" + modifier = f"{modifier}*" if modifier else "" + file_list = [] + for run_id in run_list: + file_list += sorted( + self.bids_dir.joinpath(sub_id).glob( + f"**/{sub_id}{ses_id}_{task_id}{run_id}*{modifier}_{suffix}.tsv" + ) + ) + check_file_list(file_list, n=len(run_list)) + return file_list + + #################### + # Preprocessed files + #################### + + def get_fmriprep_anat_file( + self, + sub_id: Union[int, str], + ses_id: Optional[str] = None, + space: Optional[str] = None, + suffix: Literal["T1w", "mask", "dseg", "probseg"] = "T1w", + modifier: Optional[str] = None, + ) -> list[Path]: + """Gets fMRIprep output anat file. + + Args: + sub_id: SubjectID. + ses_id: SessionID. Optional. + space: Image space.If None, selecting T1w space as default. + suffix: Filename suffix. Default is T1w, which selecting the + preprocessed T1w file. Other options are mask, dseg, and + probseg. For dseg and probseg, the modifier is usuarlly + required. + modifier: Any possible filename modifier after space and + before suffix part. + + Returns: + A list of fMRIprep preprocessed anatomical file. 
+ """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + ses_id = f"_ses-{ses_id}" if ses_id else "" + space = f"_space-{space}" if space else "" + modifier = f"{modifier}*" if modifier else "" + file_list = sorted( + self.fmriprep_dir.joinpath(sub_id).glob( + f"**/anat/{sub_id}{ses_id}{space}*{modifier}_{suffix}.nii.gz" + ) + ) + # filter out files of other spaces if requested is T1w + # the space part in the filename is omitted for T1w space + if not space: + file_list = [f for f in file_list if "space-" not in f.name] + check_file_list(file_list, n=1) + return file_list + + def get_fmriprep_anat_xfm_file( + self, + sub_id: Union[int, str], + src_space: str = "T1w", + trg_space: str = "MNI152NLin2009cAsym", + ses_id: Optional[str] = None, + run_id: Optional[str] = None, + ) -> list[Path]: + """Gets fMRIprep output anat spatial transformation file. + + Args: + sub_id: SubjectID. + src_space: Source space of the spatial transformation. + trg_space: Target space of the spatial transformation. + ses_id: SessionID. Optional. + run_id: RunID. Optional. + + Returns: + A list of fMRIPrep generated spatial transformation file. + """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + ses_id = f"_ses-{ses_id}" if ses_id else "" + run_id = f"_run-{run_id}" if run_id else "" + file_list = sorted( + self.fmriprep_dir.joinpath(sub_id).glob( + f"**/{sub_id}{ses_id}{run_id}_from-{src_space}_to-{trg_space}_mode-image_xfm.*" + ) + ) + check_file_list(file_list, n=1) + return file_list + + def get_fmriprep_func_file( + self, + sub_id: Union[int, str], + task_id: str, + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + space: str = "T1w", + suffix: Literal["bold", "boldref", "mask", "dseg"] = "bold", + modifier: Optional[str] = None, + exclude: bool = False, + ) -> list[Path]: + """Gets fMRIPrep output func file. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + ses_id: SessionID. Optional. 
+ space: Image space. + suffix: If 'bold', selecting preprocessed func file. If + 'boldref', selecting reference BOLD file. If 'mask', + selecting run-specific brain mask. If 'dseg', selecting + FreeSurfer generated segmentation. + modifier: Any possible filename modifier after space and + before suffix part. + exclude: If true, exclude runs based on data validation + metadata. + + Returns: + A list of fMRIprep preprocessed functional file. + """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + task_id = conform_task_id(task_id, with_prefix=True) + run_list = self.get_run_id(sub_id, task_id, run_id, exclude=exclude) + if not ((len(run_list) == 1) & (run_list[0] == "")): + run_list = [f"_{i}" for i in run_list] + ses_id = f"_ses-{ses_id}" if ses_id else "" + space = f"_space-{space}" if space != "" else space + modifier = f"{modifier}*" if modifier else "" + file_list = [] + for run_id in run_list: + file_list += sorted( + self.fmriprep_dir.joinpath(sub_id).glob( + f"**/{sub_id}{ses_id}_{task_id}{run_id}{space}*{modifier}_{suffix}.nii.gz" + ) + ) + check_file_list(file_list, n=len(run_list)) + return file_list + + def get_confound_file( + self, + sub_id: Union[int, str], + task_id: str, + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + exclude: bool = False, + ) -> list[Path]: + """Gets fMRIPrep output confound regressor file. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + exclude: If true, exclude runs based on data validation + metadata. + + Returns: + A list of fMRIPrep generated confound regressor file. 
+ """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + task_id = conform_task_id(task_id, with_prefix=True) + run_list = self.get_run_id(sub_id, task_id, run_id, exclude=exclude) + if not ((len(run_list) == 1) & (run_list[0] == "")): + run_list = [f"_{i}" for i in run_list] + ses_id = f"_ses-{ses_id}" if ses_id else "" + file_list = [] + for run_id in run_list: + file_list += sorted( + self.fmriprep_dir.joinpath(sub_id).glob( + f"**/{sub_id}{ses_id}_{task_id}{run_id}_desc-confounds_timeseries.tsv" + ) + ) + check_file_list(file_list, n=len(run_list)) + return file_list + + def get_preproc_surf_file( + self, + sub_id: Union[int, str], + hemi: Literal["L", "R"], + surf_id: str, + ses_id: Optional[str] = None, + space: str = "fsLR", + mesh_den: str = "32k", + desc: Optional[str] = "MSMSulc", + ) -> list[Path]: + """Gets preprocessed surface file. + + Args: + sub_id: SubjectID. + hemi: Brain hemisphere. Valid: L, R. + surf_id: Surface name. E.g., pial, sphere, probseg. + ses_id: SessionID. Optional. + space: Surface space name. E.g., fsLR + mesh_den: Surface mesh density. E.g., 32k + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + + Returns: + A list of preprocessed surface file. + """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + hemi, _ = parse_hemi(hemi) + ses_id = f"_ses-{ses_id}" if ses_id else "" + desc = f"{desc}*" if desc else "" + file_list = sorted( + self.preproc_dir.joinpath(sub_id).glob( + f"**/anat/{sub_id}{ses_id}_hemi-{hemi}_" + f"space-{space}_den-{mesh_den}*{desc}_{surf_id}.*.gii" + ) + ) + _ = check_file_list(file_list, n=1) + return file_list + + def get_preproc_nomedialwall_roi_file( + self, + sub_id: Union[int, str], + hemi: Literal["L", "R"], + ses_id: Optional[str] = None, + ) -> list[Path]: + """Gets native space nomedialwall ROI file. + + Args: + sub_id: SubjectID. + hemi: Brain hemisphere. Valid: L, R. + surf_id: Surface file name. 
E.g., pial, sphere, probseg. + ses_id: SessionID. Optional. + + Returns: + A list of native space nomedialwall ROI file. + """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + hemi, _ = parse_hemi(hemi) + ses_id = f"_ses-{ses_id}" if ses_id else "" + file_list = sorted( + self.preproc_dir.joinpath(sub_id).glob( + f"**/anat/{sub_id}{ses_id}_hemi-{hemi}_space-fsnative_" + f"den-fsnative_desc-nomedialwall_probseg.shape.gii" + ) + ) + _ = check_file_list(file_list, n=1) + return file_list + + def get_preproc_subcortical_roi_file( + self, sub_id: Union[int, str], ses_id: Optional[str] = None, space: str = "MNI152NLin6Asym" + ) -> list[Path]: + """Gets subcortical ROI file. + + Args: + sub_id: SubjectID. + ses_id: SessionID. Optional. + space: Image space. + + Returns: + A list of subcortical ROI file. + """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + ses_id = f"_ses-{ses_id}" if ses_id else "" + file_list = sorted( + self.preproc_dir.joinpath(sub_id).glob( + f"**/anat/{sub_id}{ses_id}_space-{space}*_desc-Subcortical_dseg.nii.gz" + ) + ) + _ = check_file_list(file_list, n=1) + return file_list + + def get_preproc_anat_cifti_file( + self, + sub_id: Union[int, str], + metric: str, + ses_id: Optional[str] = None, + space: str = "fsLR", + mesh_den: str = "32k", + desc: Optional[str] = "MSMSulc", + ) -> list[Path]: + """Gets preprocessed surface metric CIFTI file. + + Args: + sub_id: SubjectID. + metric: Surface metric name. E.g., curv, distortion. + ses_id: SessionID. Optional. + space: Surface space name. E.g., fsLR + mesh_den: Surface mesh density. E.g., 32k + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + + Returns: + A list of preprocessed surface metric CIFTI file. 
+ """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + ses_id = f"_ses-{ses_id}" if ses_id else "" + desc = f"{desc}*" if desc else "" + file_list = sorted( + self.preproc_dir.joinpath(sub_id).glob( + f"**/anat/{sub_id}{ses_id}_space-{space}_den-{mesh_den}*{desc}_{metric}.*.nii" + ) + ) + _ = check_file_list(file_list, n=1) + return file_list + + def get_preproc_func_cifti_file( + self, + sub_id: Union[int, str], + task_id: str, + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + metric: str = "bold", + space: str = "fsLR", + den: str = "32k", + desc: Optional[str] = "sm0pt0", + exclude: bool = False, + ) -> list[Path]: + """Gets preprocessed functional CIFTI file. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + metric: Functional metric name. E.g., bold, mean. + space: Surface space name. E.g., fsLR + mesh_den: Surface mesh density. E.g., 32k + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + exclude: If true, exclude runs based on data validation + metadata. + + Returns: + A list of preprocessed functional CIFTI file. 
+ """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + task_id = conform_task_id(task_id, with_prefix=True) + run_list = self.get_run_id(sub_id, task_id, run_id, exclude=exclude) + if not ((len(run_list) == 1) & (run_list[0] == "")): + run_list = [f"_{i}" for i in run_list] + ses_id = f"_ses-{ses_id}" if ses_id else "" + desc = f"{desc}*" if desc else "" + file_list = [] + for run_id in run_list: + file_list += sorted( + self.preproc_dir.joinpath(sub_id).glob( + f"**/{sub_id}{ses_id}_{task_id}{run_id}_" + f"space-{space}_den-{den}*{desc}_{metric}.*.nii" + ) + ) + _ = check_file_list(file_list, n=len(run_list)) + return file_list + + ############################## + # Singletrial estimation files + ############################## + + def get_singletrial_response_cifti_file( + self, + sub_id: Union[int, str], + task_id: str, + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + metric: str = "beta", + space: str = "fsLR", + den: str = "32k", + desc: Optional[str] = "sm0pt0", + exclude: bool = False, + ) -> list[Path]: + """Gets singletrial response CIFTI file. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + metric: Functional metric name. E.g., beta, tstat. + space: Surface space name. E.g., fsLR + mesh_den: Surface mesh density. E.g., 32k + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + exclude: If true, exclude runs based on data validation + metadata. + + Returns: + A list of singletrial response CIFTI file. 
+ """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + task_id = conform_task_id(task_id, with_prefix=True) + run_list = self.get_run_id(sub_id, task_id, run_id, exclude=exclude) + if not ((len(run_list) == 1) & (run_list[0] == "")): + run_list = [f"_{i}" for i in run_list] + ses_id = f"_ses-{ses_id}" if ses_id else "" + desc = f"_desc-{desc}" if desc else "" + file_list = [] + for run_id in run_list: + file_list += sorted( + self.singletrial_dir.joinpath(f"{sub_id}").glob( + f"**/{sub_id}{ses_id}_{task_id}{run_id}_" + f"space-{space}_den-{den}{desc}_{metric}.dscalar.nii" + ) + ) + _ = check_file_list(file_list, n=len(run_list)) + return file_list + + ################ + # Metadata files + ################ + + def get_metadata_file(self, name: str, ext: str = "yaml") -> list[Path]: + """Gets metadata file. + + This function gets a metadate file under + ${project_root_dir}/data/metadata directory. + + Returns: + A list of metadata file. + """ + + file_list = [self.metadata_dir.joinpath(f"{name}.{ext}")] + _ = check_file_list(file_list, n=1) + return file_list + + def get_template_info_file(self) -> list[Path]: + """Gets brain template infomation file. + + This function gets the template.yaml under + ${project_root_dir}/data/metadata directory. + + Returns: + A list of brain template information file. + """ + return self.get_metadata_file("template") + + def get_atlas_info_file(self) -> list[Path]: + """Gets brain atlas infomation file. + + This function gets the atlas.yaml under + ${project_root_dir}/data/metadata directory. + + Returns: + A list of the atlas information file. + """ + return self.get_metadata_file("atlas") + + def get_roi_definition_file(self) -> list[Path]: + """Gets default ROI definition file. + + This function gets the roi_definition.yaml under + ${project_root_dir}/data/metadata directory. + + Returns: + A list of the ROI definition file. 
+ """ + return self.get_metadata_file("roi_definition") + + def get_roi_list_file(self) -> list[Path]: + """Gets ROI list file. + + This function gets the roi_list.yaml under + ${project_root_dir}/data/metadata directory. + + Returns: + A list of the ROI list file. + """ + return self.get_metadata_file("roi_list") + + ################ + # External files + ################ + + def get_std_surf_file( + self, template_name: str, hemi: Literal["L", "R"], surf_id: str, desc: Optional[str] = None + ) -> list[Path]: + """ + Gets template surface file. + + Args: + template_name: Template name. The name shoule be defined in + metadata file template.yaml. + hemi: Brain hemisphere. Valid: L, R. + surf_id: Surface name. E.g., pial, sphere. + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + + Returns: + A list of template surface file. + """ + + with open(self.get_template_info_file()[0], "r") as f: + template_info = yaml.load(f, Loader=yaml.CLoader) + if template_name not in template_info.keys(): + raise ValueError(f"Template {template_name} is not defined in the template.yaml file.") + temp_dir = template_info[template_name] + desc = f"{desc}*" if desc else "" + file_list = sorted( + self.base_dir.joinpath(temp_dir).glob(f"*hemi-{hemi}*{desc}_{surf_id}.*.gii") + ) + _ = check_file_list(file_list, n=1) + return file_list + + def get_std_nomedialwall_roi_file( + self, template_name: str, hemi: Literal["L", "R"], desc: Optional[str] = "nomedialwall" + ) -> list[Path]: + """ + Gets template nomedialwall ROI file. + + Args: + template_name: Template name. The name shoule be defined in + metadata file template.yaml. + hemi: Brain hemisphere. Valid: L, R. + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + + Returns: + A list of template nomedialwall ROI file. 
+ """ + + with open(self.get_template_info_file()[0], "r") as f: + template_info = yaml.load(f, Loader=yaml.CLoader) + if template_name not in template_info.keys(): + raise ValueError(f"Template {template_name} is not defined in the template.yaml file.") + temp_dir = template_info[template_name] + desc = f"{desc}*" if desc else "" + file_list = sorted( + self.base_dir.joinpath(temp_dir).glob(f"*hemi-{hemi}*{desc}_probseg.shape.gii") + ) + _ = check_file_list(file_list, n=1) + return file_list + + def get_std_subcortical_roi_file( + self, template_name: str, space: str = "MNI152NLin6Asym" + ) -> list[Path]: + """ + Gets template subcortical ROI file. + + Args: + template_name: Template name. The name shoule be defined in + metadata file template.yaml. + space: Image space. + + Returns: + A list of template subcortical ROI file. + """ + + with open(self.get_template_info_file()[0], "r") as f: + template_info = yaml.load(f, Loader=yaml.CLoader) + if template_name not in template_info.keys(): + raise ValueError(f"Template {template_name} is not defined in the template.yaml file.") + temp_dir = template_info[template_name] + file_list = sorted( + self.base_dir.joinpath(temp_dir).glob(f"*space-{space}*_desc-Subcortical_dseg.nii.gz") + ) + _ = check_file_list(file_list, n=1) + return file_list + + def get_std_cifti_file( + self, template_name: str, metric: str, desc: Optional[str] = None + ) -> list[Path]: + """ + Gets template CIFTI file. + + Args: + template_name: Template name. The name shoule be defined in + metadata file template.yaml. + metric: Surface metric name. E.g., curv, dseg. + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + + Returns: + A list of template CIFTI file. 
+ """ + + with open(self.get_template_info_file()[0], "r") as f: + template_info = yaml.load(f, Loader=yaml.CLoader) + if template_name not in template_info.keys(): + raise ValueError(f"Template {template_name} is not defined in the template.yaml file.") + temp_dir = template_info[template_name] + desc = f"{desc}*" if desc else "" + file_list = sorted(self.base_dir.joinpath(temp_dir).glob(f"*{desc}_{metric}.*.nii")) + _ = check_file_list(file_list, n=1) + return file_list + + def get_atlas_file( + self, atlas_id: str, space: str = "fsLR", sub_id: Optional[Union[str, int]] = None + ) -> list[Path]: + """Gets standard atlas file. + + Args: + atlas_id: Atlas name. + space: Atlas space. + sub_id: Subject ID. Optional. Only required if the filename + of the atlas contains subject-specific part. + Returns: + A list of atlas files corresponding to left and right brain + hemisphere. Either file could be None. Both hemisphere could + be the same file. + + Raises: + KeyError: Given atlas_id or space is not found in atlas.yaml + file. + """ + + if sub_id: + sub_id = conform_sub_id(sub_id, with_prefix=True) + with open(self.get_atlas_info_file()[0], "r") as f: + atlas_info = yaml.load(f, Loader=yaml.CLoader) + if space not in atlas_info.keys(): + raise KeyError( + f"Atlas space {space} is not found in atlas.yaml file. " + f"Possible value: {', '.join(atlas_info.keys())}." + ) + if atlas_id not in atlas_info[space].keys(): + raise KeyError( + f"Atlas {atlas_id} ({space} space) is not found in atlas.yaml file. " + f"Possible value: {', '.join(atlas_info[space].keys())}." 
+ ) + + atlas_file = [] + for hemi in ["L", "R"]: + if hemi in atlas_info[space][atlas_id].keys(): + # allow subject specific atlas file + fname = atlas_info[space][atlas_id][hemi].format(sub_id=sub_id) + f = Path(self.base_dir, fname) + _ = check_file_list([f], n=1) + atlas_file.append(f) + else: + atlas_file.append(None) + return atlas_file + + ################## + # Utility function + ################## + + def get_run_id( + self, + sub_id: Union[str, int], + task_id: str, + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + exclude: bool = False, + ) -> list[str]: + """Gets run list of a task. + + This function reads the default run list of a task from data + validation metadata and excludes bad runs based on the metadata + if requested. + If there is no run_id available (task with only a single run), + it will return [""]. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + exclude: If true, exclude runs based on data validation + metadata. + + Returns: + A list of run_id of a task. + """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + task_id = conform_task_id(task_id, with_prefix=True) + default_list = self.task_info[task_id]["run_list"] + exclude_info = self.task_info[task_id]["exclude"] + # Check if the subject has excluded run + bad_list = exclude_info[sub_id] if sub_id in exclude_info else [] + # Single run task + if len(default_list) == 0: + if (sub_id in exclude_info) and exclude: + run_list = [] + else: + run_list = [""] + # Multiple runs without selection + elif run_id is None: + if exclude: + run_list = [i for i in default_list if i not in bad_list] + else: + run_list = default_list + # Multiple runs with selection + else: + run_list = run_id if isinstance(run_id, list) else [run_id] + run_list = [conform_run_id(i, with_prefix=True) for i in run_list] + if exclude: + run_list = [i for i in run_list if i not in bad_list] + return run_list + + +class Project(Layout): + """Generic project class. 
+ + This class provides a series of functions to find, read and + manipulate project files. + + Attributes: + bids_subject_list: All SubjectID in the BIDS dataset. + subject_list: SubjectID after data exclusion. + task_list: All TaskID in the BIDS dataset. + task_info: A dict contains information of the task. Usually it + contains information of data exclusion. + func_regex: A regular expression to match a task related file. + """ + + def __init__(self, project_root_dir): + super().__init__(project_root_dir) + + ############### + # Tabular files + ############### + + def read_beh( + self, + sub_id: Union[int, str], + task_id: str, + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + suffix: str = "events", + modifier: Optional[str] = None, + exclude: bool = False, + ) -> pd.DataFrame: + """Reads behavior file to dataframe. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + suffix: Filename suffix after modifier and before extexsion. + modifier: Any possible filename modifier after session and + before suffix part. + exclude: If true, exclude runs based on data validation + metadata. + + Returns: + A dataframe of behavior data. 
+ """ + + file_list = self.get_beh_file( + sub_id, + run_id=run_id, + task_id=task_id, + ses_id=ses_id, + suffix=suffix, + modifier=modifier, + exclude=exclude, + ) + df = [pd.read_csv(f, sep="\t") for f in file_list] + df = pd.concat(df).reset_index(drop=True) + + return df + + def read_confound( + self, + sub_id: Union[int, str], + task_id: str, + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + confound_list: Optional[list[str]] = [ + "trans_x", + "trans_y", + "trans_z", + "rot_x", + "rot_y", + "rot_z", + "framewise_displacement", + "std_dvars", + "dvars", + "rmsd", + "global_signal", + "csf", + "white_matter", + "csf_wm", + ], + exclude: bool = False, + ) -> pd.DataFrame: + """Reads confound regressor timeseries to dataframe. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + confound_list: A list of confound regressor names. If it's + None, reading all columns except PCA component related + ones. + exclude: If true, exclude runs based on data validation + metadata. + + Returns: + A dataframe of confound regressors generated by fMRIPrep. 
+ """ + + sub_id = conform_sub_id(sub_id, with_prefix=True) + file_list = self.get_confound_file( + sub_id, task_id, run_id=run_id, ses_id=ses_id, exclude=exclude + ) + df = [] + for f in file_list: + df_run = pd.read_csv(f, sep="\t") + # add useful information + df_run["sub_id"] = sub_id + df_run["task_id"] = task_id + df_run["run_id"] = re.search(self.func_regex, f.name).group("run_id") + # filter out a/t comp columns + col_list = df_run.columns.tolist() + col_list = [i for i in df_run.columns.tolist()[:-3] if "comp_cor" not in i] + df_run = df_run.loc[:, ["sub_id", "task_id", "run_id"] + col_list] + df.append(df_run) + df = pd.concat(df).reset_index(drop=True) + # Select column if requested + if confound_list: + df = df.loc[:, ["sub_id", "task_id", "run_id"] + confound_list] + return df + + ############################### + # Preprocessed functional files + ############################### + + def read_preproc_func_cifti( + self, + sub_id: Union[int, str], + task_id: str, + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + metric: str = "bold", + space: str = "fsLR", + den: str = "32k", + desc: Optional[str] = "sm0pt0", + exclude: bool = False, + volume_as_img: bool = False, + standardize: Optional[Literal["zscore"]] = None, + ) -> dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]: + """Reads preprocessed func data from CIFTI file. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + metric: Functional metric name. E.g., bold, mean. + space: Surface space name. E.g., fsLR + mesh_den: Surface mesh density. E.g., 32k + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + exclude: If true, exclude runs based on data validation + metadata. + volume_as_img: If true, the volume part in the CIFTI image + is extracted as a nib.nifti1.Nifti1Image object. If + false, it's extracted as a numpy array. 
+ standardize: Standardize each vertex/voxel along the time + dimension. Valid: zscore. + + Returns: + A dict contains splitted CIFTI data. The keys are SurfaceL, + SurfaceR, and Volume. If in_file is a list of filenames, the + data will be concatenate along the time dimension (row). + """ + + file_list = self.get_preproc_func_cifti_file( + sub_id, + task_id, + run_id=run_id, + ses_id=ses_id, + metric=metric, + space=space, + den=den, + desc=desc, + exclude=exclude, + ) + return read_dtseries( + file_list, volume_as_img=volume_as_img, standardize=standardize, dtype=np.float32 + ) + + def read_preproc_func_cifti_roi( + self, + sub_id: Union[int, str], + task_id: str, + roi_mask: Union[ + dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]], + list[dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]], + ], + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + metric: str = "bold", + space: str = "fsLR", + den: str = "32k", + desc: Optional[str] = "sm0pt0", + exclude: bool = False, + standardize: Optional[Literal["zscore"]] = None, + single_data_array: bool = True, + ) -> Union[ + np.ndarray, + list[np.ndarray], + dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]], + list[dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]], + ]: + """Reads preprocessed func data within ROI from CIFTI file. + + This function could read multiple ROI data at once. It's faster + than a explicit for loop, since this method only reads the whole + data once. In that case, the ROI data will be in a list instead + of a single numpy array of dict. + + Args: + sub_id: SubjectID. + task_id: TaskID. + roi_mask: A (list of) ROI mask dict. It is usually generated + by the 'make_roi_from_spec' function. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + metric: Functional metric name. E.g., bold, mean. + space: Surface space name. E.g., fsLR + mesh_den: Surface mesh density. E.g., 32k + desc: The desc part in the filename. 
It could be part of the + full desc string, as long as it only matches one file. + exclude: If true, exclude runs based on data validation + metadata. + standardize: Standardize each vertex/voxel along the time + dimension. Valid: zscore. + single_data_array: If true, concatenate all parts into a + single numpy array along columns. Order: SurfaceL, + SurfaceR, Volume. + + Returns: + Depending on the inputs, the returned ROI data could be in + several format. + If the 'single_data_array' option is True (default), the ROI + data will be contained in a numpy array. If it's False, the + ROI data will be in a dict like the roi_mask. + If the 'roi_mask' is a list of ROI mask dict, the data of + each ROI will be in a list, and the order is the same as the + 'roi_mask'. + Data of multiple runs will always be concatenated along the + time (row) dimension. + """ + + file_list = self.get_preproc_func_cifti_file( + sub_id, + task_id, + run_id=run_id, + ses_id=ses_id, + metric=metric, + space=space, + den=den, + desc=desc, + exclude=exclude, + ) + return read_dtseries_roi( + file_list, + roi_mask, + standardize=standardize, + single_data_array=single_data_array, + dtype=np.float32, + ) + + ############################## + # Singletrial estimation files + ############################## + + def read_singletrial_response_cifti( + self, + sub_id: Union[int, str], + task_id: str, + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + metric: str = "beta", + space: str = "fsLR", + den: str = "32k", + desc: Optional[str] = "sm0pt0", + exclude: bool = False, + volume_as_img: bool = False, + standardize: Optional[Literal["zscore"]] = None, + ) -> dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]: + """Reads singletrial response estimation from CIFTI file. + + Args: + sub_id: SubjectID. + task_id: TaskID. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + metric: Functional metric name. E.g., bold, mean. 
+ space: Surface space name. E.g., fsLR + mesh_den: Surface mesh density. E.g., 32k + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + exclude: If true, exclude runs based on data validation + metadata. + volume_as_img: If true, the volume part in the CIFTI image + is extracted as a nib.nifti1.Nifti1Image object. If + false, it's extracted as a numpy array. + standardize: Standardize each vertex/voxel along the trial + dimension. Valid: zscore. + + Returns: + A dict contains splitted CIFTI data. The keys are SurfaceL, + SurfaceR, and Volume. If in_file is a list of filenames, the + data will be concatenate along the trial dimension (row). + """ + + file_list = self.get_singletrial_response_cifti_file( + sub_id, + task_id, + run_id=run_id, + ses_id=ses_id, + metric=metric, + space=space, + den=den, + desc=desc, + exclude=exclude, + ) + return read_dscalar( + file_list, volume_as_img=volume_as_img, standardize=standardize, dtype=np.float32 + ) + + def read_singletrial_response_cifti_roi( + self, + sub_id: Union[int, str], + task_id: str, + roi_mask: Union[ + dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]], + list[dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]], + ], + run_id: Optional[Union[str, int, list[str], list[int]]] = None, + ses_id: Optional[str] = None, + metric: str = "beta", + space: str = "fsLR", + den: str = "32k", + desc: Optional[str] = "sm0pt0", + exclude: bool = False, + standardize: Optional[Literal["zscore"]] = None, + single_data_array: bool = True, + ) -> Union[ + np.ndarray, + list[np.ndarray], + dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]], + list[dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]], + ]: + """Reads singletrial response data within ROI from CIFTI file. + + This function could read multiple ROI data at once. It's faster + than a explicit for loop, since this method only reads the whole + data once. 
In that case, the ROI data will be in a list instead + of a single numpy array of dict. + + Args: + sub_id: SubjectID. + task_id: TaskID. + roi_mask: A (list of) ROI mask dict. It is usually generated + by the 'make_roi_from_spec' function. + run_id: RunID. Optional. + ses_id: SessionID. Optional. + metric: Functional metric name. E.g., bold, mean. + space: Surface space name. E.g., fsLR + mesh_den: Surface mesh density. E.g., 32k + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + exclude: If true, exclude runs based on data validation + metadata. + standardize: Standardize each vertex/voxel along the trial + dimension. Valid: zscore. + single_data_array: If true, concatenate all parts into a + single numpy array along columns. Order: SurfaceL, + SurfaceR, Volume. + + Returns: + Depending on the inputs, the returned ROI data could be in + several format. + If the 'single_data_array' option is True (default), the ROI + data will be contained in a numpy array. If it's False, the + ROI data will be in a dict like the roi_mask. + If the 'roi_mask' is a list of ROI mask dict, the data of + each ROI will be in a list, and the order is the same as the + 'roi_mask'. + Data of multiple runs will always be concatenated along the + time (row) dimension. + """ + + file_list = self.get_singletrial_response_cifti_file( + sub_id, + task_id, + run_id=run_id, + ses_id=ses_id, + metric=metric, + space=space, + den=den, + desc=desc, + exclude=exclude, + ) + return read_dscalar_roi( + file_list, + roi_mask, + standardize=standardize, + single_data_array=single_data_array, + dtype=np.float32, + ) + + ############# + # ROI related + ############# + + def read_atlas_info( + self, atlas_file: Optional[PathLike] = None + ) -> dict[str, dict[str, dict[str, str]]]: + """Reads atlas information. + + Args: + atlas_info_file: An atlas information yaml file. + If None, read the default file inside metadata directory. 
+ + Returns: + A dict contains atlas information. Each key represents a + space where the atlas is in. The value is a dict mapping + atlas name to its file information. + For each atlas, the file information is stored in a dict + which has two items (key: L, R) indicate files of left and + right brain hemisphere (could be same file if the atlas file + is bilateral). + """ + + if atlas_file is None: + # Use default file if not provided + atlas_file = self.get_atlas_info_file()[0] + with open(atlas_file, "r") as f: + atlas_info = yaml.load(f, Loader=yaml.CLoader) + return atlas_info + + def read_roi_spec(self, roi_spec_file: Optional[PathLike] = None) -> dict[str, dict[str, Any]]: + """Reads ROI specification. + + Args: + roi_spec_file: A ROI specification yaml file. + If None, read the default file insidemetadata directory. + + Returns + ------- + dict + A dict contains ROI specification. Each keys represents a + ROI. The value is the specification which is also a dict. + Common items in the dict are AtlasID, ROIType, IndexL, + IndexR, LabelL and LabelR. There could be additional items + present in the dict. + """ + + if roi_spec_file is None: + # Use default file if not provided + roi_spec_file = self.get_roi_definition_file()[0] + with open(roi_spec_file, "r") as f: + roi_spec = yaml.load(f, Loader=yaml.CLoader) + return roi_spec + + def read_roi_list(self, roi_list_file: Optional[PathLike] = None) -> list[str]: + """Reads ROI list. + + Args: + roi_list_file: A ROI list yaml file. + If None, read the default file inside metadata directory. + + Returns + A list of ROI names. 
+ """ + + if roi_list_file is None: + # Use default file if not provided + roi_list_file = self.get_roi_list_file()[0] + with open(roi_list_file, "r") as f: + roi_list = yaml.load(f, Loader=yaml.CLoader) + return roi_list + + def make_roi_from_spec( + self, + roi_id: str, + roi_spec: dict[str, dict[str, Any]], + sub_id: Optional[Union[str, int]] = None, + atlas_file: Optional[list[PathLike]] = None, + ) -> dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]: + """Makes ROI mask based on given ROI specification. + + Args: + roi_id: ROI name. + roi_spec: ROI specification. Usually it generates by the + 'read_roi_spec' function. + sub_id: Subject ID. Optional. Only required if the filename + of the atlas contains subject-specific part. + atlas_file: Atlas files used for creating ROI. Optional. By + default, atlas files are retrieved by the + 'get_atlas_file' function. It should be a list of + filenames corresponding to left and right brain + hemisphere. Either file could be None. Both hemispheres + could be the same file. + + Returns: + A dict with 3 items. The keys are SurfaceL, SurfaceR and + Volume, corresponding to the left, right brain hemisphere + and the volume part. Usually a ROI could be either in + surface or volume format, but not both. + Surface mask is represented in a numpy array. Volume mask is + represented in a nib.nifti1.Nifti1Image image. + """ + + roi_name, _ = parse_roi_id(roi_id) + atlas_id = roi_spec[roi_name]["AtlasID"] + space = roi_spec[roi_name]["AtlasSpace"] + if atlas_file is None: + atlas_file = self.get_atlas_file(atlas_id, space=space, sub_id=sub_id) + roi_mask = make_roi_from_spec(roi_id, roi_spec, atlas_file) + return roi_mask + + def unmask( + self, + data: np.ndarray, + roi_mask: dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]], + ) -> dict[str, Union[np.ndarray, nib.nifti1.Nifti1Image]]: + """Reshapes ROI data back into its original shape. 
+ + Args: + data: Any data (a numpy array) generates by custom ROI data + reading function in the package + (e.g., read_preproc_func_cifti_roi). + roi_mask: The ROI mask dict used to read the data. + + Returns: + A dict with 3 items contains the unmasked data. The keys are + SurfaceL, SurfaceR and Volume, corresponding to the left, right + brain hemisphere and the volume part. + Surface mask is represented in a numpy array. Volume mask is + represented in a nib.nifti1.Nifti1Image image. + + Raises: + ValueError: data is not a 1-d or 2d numpy array. + ValueError: roi_mask is invalid. + """ + + return unmask(data, roi_mask) + + ########################## + # Quick plotting functions + ########################## + + def view_img( + self, + stat_map_img: nib.nifti1.Nifti1Image, + bg_img: Union[ + Literal["T1w", "MNI152NLin2009cAsym", "MNI152NLin6Asym", "MNI152"], + nib.nifti1.Nifti1Image, + ] = "MNI152", + sub_id: Optional[str] = None, + ses_id: Optional[str] = None, + modifier: Optional[str] = None, + **kwargs, + ) -> StatMapView: + """Views a volume image. + + Args: + stat_map_img: A nib.nifti1.Nifti1Image image. + bg_img: Background image to plot on top of. It could be + a nib.nifti1.Nifti1Image image object or a string. Valid + string: T1w, MNI152NLin2009cAsym, MNI152NLin6Asym, + MNI152. If it's MNI152, using nilearn's MNI152 template + and ignoring argument sub_id, ses_id and modifier. + sub_id: SubjectID. Optional. + ses_id: SessionID. Optional. + modifier: Any possible filename modifier after space and + before suffix part. Optional see function + 'get_fmriprep_anat_file'. + **kwargs: Additional keyword arguments pass to nilearn + 'view_img' function. + + Returns: + A nilearn StatMapView object. It can be saved as an html + page html_view.save_as_html('test.html'), or opened in a + browser html_view.open_in_browser(). If the output is not + requested and the current environment is a Jupyter notebook, + the viewer will be inserted in the notebook. 
+ """ + + import nilearn.plotting as nlp + + valid_list = ["T1w", "MNI152NLin2009cAsym", "MNI152NLin6Asym", "MNI152"] + if isinstance(bg_img, str): + if bg_img not in valid_list: + raise ValueError(f"Unsupported bg_img. Valid: {', '.join(valid_list)}") + if (bg_img != "MNI152") and (sub_id is None): + raise ValueError( + "If a subject-specific bg_img is requested (e.g., T1w), " + "the sub_id must be specified." + ) + + # get subject specific bg_img + if isinstance(bg_img, str) and (bg_img != "MNI152"): + if bg_img == "T1w": + print( + "Warning: The 'view_img' function seem to rely on a MNI template to display " + "bg_img.\nAs a result, the non-MNI image will be cropped. Usually this is not " + "a problem for a quick look of data." + ) + bg_img = self.get_fmriprep_anat_file(sub_id, ses_id=ses_id, modifier=modifier)[0] + else: + bg_img = self.get_fmriprep_anat_file( + sub_id, ses_id=ses_id, space=bg_img, modifier=modifier + )[0] + bg_img = nib.load(bg_img) + + with warnings.catch_warnings(): # Ignore nilearn's UserWarning + warnings.simplefilter("ignore") + g = nlp.view_img(stat_map_img, bg_img=bg_img, dim=0, **kwargs) + return g + + def view_surf_data( + self, + hemi: str, + surf_map: Union[np.ndarray, PathLike], + surf_mesh: Optional[ + Union[ + Literal["pial", "wm", "midthickness", "inflated", "veryinflated"], + PathLike, + list[np.ndarray], + tuple[np.ndarray, np.ndarray], + ] + ] = "midthickness", + template_name: str = "fsLR_32k", + bg_map: Optional[Union[Literal["sulc"], np.ndarray, PathLike]] = "sulc", + desc: Optional[str] = None, + **kwargs, + ) -> SurfaceView: + """Views surface data. + + Args: + hemi: Brain hemisphere. Valid: L, R. + surf_map: A map need to be displayed on surface. The size of + the surf_map should match the number of vertices in the + surf_mesh. + surf_mesh: Surface mesh used for displaying data. It could + be a surface name in the template. 
It could also be a + surface mesh filename or a list of two numpy array which + represents a surface mesh. + template_name: Template name of the surface mesh. + bg_map: Background image to be plotted on the mesh + underneath the surf_data in greyscale, most likely a + sulcal depth map for realistic shading. It could + be a surface metric in the template. It could also be a + surface mesh filename or a numpy array with same size as + the number of vertices in the surf_mesh. + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + This argument will be pass to 'get_std_surf_file' + function. + **kwargs: Additional keyword arguments pass to nilearn + 'view_surf' function. + + Returns: + A nilean SurfaceView object. It can be saved as an html + page html_view.save_as_html('test.html'), or opened in a + browser html_view.open_in_browser(). If the output is not + requested and the current environment is a Jupyter notebook, + the viewer will be inserted in the notebook. 
+ """ + + import nilearn.plotting as nlp + + # surf_map + surf_map = str(surf_map) if not isinstance(surf_map, np.ndarray) else surf_map + # surface mesh + if isinstance(surf_mesh, tuple): + surf_mesh = list(surf_mesh) + surf_mesh = str(surf_mesh) if not isinstance(surf_mesh, list) else surf_mesh + valid_list = ["pial", "wm", "midthickness", "inflated", "veryinflated"] + if surf_mesh in valid_list: + surf_mesh = str(self.get_std_surf_file(template_name, hemi, surf_mesh, desc=desc)[0]) + # bg_map + if bg_map == "sulc": + bg_map = self.get_std_surf_file(template_name, hemi, bg_map, desc=desc)[0] + bg_map = str(bg_map) if not isinstance(bg_map, np.ndarray) else bg_map + + with warnings.catch_warnings(): # Ignore nilearn's UserWarning + warnings.simplefilter("ignore") + g = nlp.view_surf(surf_mesh, surf_map=surf_map, bg_map=bg_map, **kwargs) + return g + + def view_roi_img( + self, + roi_img: nib.nifti1.Nifti1Image, + bg_img: Union[ + Literal["T1w", "MNI152NLin2009cAsym", "MNI152NLin6Asym", "MNI152"], + nib.nifti1.Nifti1Image, + ] = "MNI152", + sub_id: Optional[str] = None, + ses_id: Optional[str] = None, + modifier: Optional[str] = None, + threshold: float = 0.1, + cmap: str = "Set1", + symmetric_cmap: bool = False, + colorbar: bool = False, + vmax: float = 10, + **kwargs, + ) -> StatMapView: + """Views a volume ROI. + + Args: + roi_img: A nib.nifti1.Nifti1Image image. + bg_img: Background image to plot on top of. It could be + a nib.nifti1.Nifti1Image image object or a string. Valid + string: T1w, MNI152NLin2009cAsym, MNI152NLin6Asym, + MNI152. If it's MNI152, using nilearn's MNI152 template + and ignoring argument sub_id, ses_id and modifier. + sub_id: SubjectID. Optional. + ses_id: SessionID. Optional. + modifier: Any possible filename modifier after space and + before suffix part. Optional see function + 'get_fmriprep_anat_file'. + threshold: If None is given, the image is not thresholded. 
+ If a string of the form "90%" is given, use the 90-th + percentile of the absolute value in the image. If a + number is given, it is used to threshold the image: + values below the threshold (in absolute value) are + plotted as transparent. If auto is given, the threshold + is determined automatically. + cmap: The colormap for specified image. + symmetric_cmap: True: make colormap symmetric (ranging from + -vmax to vmax). False: the colormap will go from the + minimum of the volume to vmax. Set it to False if you + are plotting a positive volume, e.g. an atlas or an + anatomical image. + colorbar: If True, display a colorbar on top of the plots. + vmax: Max value for mapping colors. If vmax is None and + symmetric_cmap is True, vmax is the max absolute value + of the volume. If vmax is None and symmetric_cmap is + False, vmax is the max value of the volume. + **kwargs: Additional keyword arguments pass to nilearn + 'view_img' function. + **kwargs: Additional keyword arguments pass to nilearn + 'view_img' function. + + Returns: + A nilearn StatMapView object. It can be saved as an html + page html_view.save_as_html('test.html'), or opened in a + browser html_view.open_in_browser(). If the output is not + requested and the current environment is a Jupyter notebook, + the viewer will be inserted in the notebook. 
+ """ + + return self.view_img( + roi_img, + bg_img=bg_img, + sub_id=sub_id, + ses_id=ses_id, + modifier=modifier, + threshold=threshold, + cmap=cmap, + symmetric_cmap=symmetric_cmap, + colorbar=colorbar, + vmax=vmax, + resampling_interpolation="nearest", + **kwargs, + ) + + def view_roi_surf( + self, + hemi: str, + surf_map: Union[np.ndarray, PathLike], + surf_mesh: Optional[ + Union[ + Literal["pial", "wm", "midthickness", "inflated", "veryinflated"], + PathLike, + list[np.ndarray], + tuple[np.ndarray, np.ndarray], + ] + ] = "midthickness", + template_name: str = "fsLR_32k", + bg_map: Optional[Union[Literal["sulc"], np.ndarray, PathLike]] = "sulc", + desc: Optional[str] = None, + threshold: float = 0.1, + cmap: str = "Set1", + symmetric_cmap: bool = False, + colorbar: bool = False, + vmax: float = 10, + **kwargs, + ) -> SurfaceView: + """Views surface data. + + Args: + hemi: Brain hemisphere. Valid: L, R. + surf_map: A map need to be displayed on surface. The size of + the surf_map should match the number of vertices in the + surf_mesh. + surf_mesh: Surface mesh used for displaying data. It could + be a surface name in the template. It could also be a + surface mesh filename or a list of two numpy array which + represents a surface mesh. + template_name: Template name of the surface mesh. + bg_map: Background image to be plotted on the mesh + underneath the surf_data in greyscale, most likely a + sulcal depth map for realistic shading. It could + be a surface metric in the template. It could also be a + surface mesh filename or a numpy array with same size as + the number of vertices in the surf_mesh. + desc: The desc part in the filename. It could be part of the + full desc string, as long as it only matches one file. + This argument will be pass to 'get_std_surf_file' + function. + threshold: If None is given, the image is not thresholded. + If a string of the form "90%" is given, use the 90-th + percentile of the absolute value in the image. 
If a + number is given, it is used to threshold the image: + values below the threshold (in absolute value) are + plotted as transparent. If auto is given, the threshold + is determined automatically. + cmap: The colormap for specified image. + symmetric_cmap: True: make colormap symmetric (ranging from + -vmax to vmax). False: the colormap will go from the + minimum of the volume to vmax. Set it to False if you + are plotting a positive volume, e.g. an atlas or an + anatomical image. + colorbar: If True, display a colorbar on top of the plots. + vmax: Max value for mapping colors. If vmax is None and + symmetric_cmap is True, vmax is the max absolute value + of the volume. If vmax is None and symmetric_cmap is + False, vmax is the max value of the volume. + **kwargs: Additional keyword arguments pass to nilearn + 'view_img' function. + **kwargs: Additional keyword arguments pass to nilearn + 'view_surf' function. + + Returns: + A nilean SurfaceView object. It can be saved as an html + page html_view.save_as_html('test.html'), or opened in a + browser html_view.open_in_browser(). If the output is not + requested and the current environment is a Jupyter notebook, + the viewer will be inserted in the notebook. 
+ """ + + return self.view_surf_data( + hemi, + surf_map, + surf_mesh=surf_mesh, + template_name=template_name, + bg_map=bg_map, + desc=desc, + threshold=threshold, + cmap=cmap, + symmetric_cmap=symmetric_cmap, + colorbar=colorbar, + vmax=vmax, + **kwargs, + ) diff --git a/pantheon/utils/__init__.py b/pantheon/utils/__init__.py new file mode 100644 index 0000000..a5682fb --- /dev/null +++ b/pantheon/utils/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- \ No newline at end of file diff --git a/pantheon/utils/shell.py b/pantheon/utils/shell.py new file mode 100644 index 0000000..3eb8650 --- /dev/null +++ b/pantheon/utils/shell.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""External shell interaction functions""" + +from __future__ import annotations +from typing import Union +import sys +from shlex import split +import subprocess + + +def run_cmd( + cmd: Union[str, list[str]], + print_output: bool = True, + shell: bool = False, + check: bool = True, + **kwargs, +) -> subprocess.CompletedProcess: + """Executes command in Shell. + + Args: + cmd: Command to be executed in external shell. It could be a + string or a list of command parts (see subprocess function + 'run' for details). + print_output: If true, print out the shell outputs. + shell: If true, the command will be executed through the shell + (see subprocess doc for details). + check: If check is true, and the process exits with a non-zero + exit code, a CalledProcessError exception will be raised. + Attributes of that exception hold the arguments, the exit + code, and stdout and stderr if they were captured. + **kwargs: Additional keyword arguments pass to function 'run'. + + Returns: + A subprocess.CompletedProcess object. 
+ """ + + try: + if shell: + if isinstance(cmd, list): + cmd = " ".join(cmd) + res = subprocess.run( + cmd, shell=True, capture_output=True, check=check, encoding="utf-8", **kwargs + ) + else: + if isinstance(cmd, str): + cmd = split(cmd) + res = subprocess.run(cmd, capture_output=True, check=check, encoding="utf-8", **kwargs) + if print_output: + if res.stdout != "": + print(res.stdout.rstrip("\n"), flush=True) + if res.stderr != "": + print(res.stderr, flush=True) + except subprocess.CalledProcessError as e: + print(e.stdout) + print(e.stderr) + sys.exit(1) + return res diff --git a/pantheon/utils/typing.py b/pantheon/utils/typing.py new file mode 100644 index 0000000..65425e1 --- /dev/null +++ b/pantheon/utils/typing.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Misc functions.""" + +# Author: Zhifang Ye +# Email: zhifang.ye.fghm@gmail.com +# Notes: + +from __future__ import annotations +from typing import Union +from pathlib import Path +from plotly.graph_objs._figure import Figure +import nilearn.plotting as nlp + +PathLike = Union[Path, str] +PlotlyFigure = Figure +StatMapView = nlp.html_stat_map.StatMapView +SurfaceView = nlp.html_surface.SurfaceView diff --git a/pantheon/utils/validation.py b/pantheon/utils/validation.py new file mode 100644 index 0000000..34d364b --- /dev/null +++ b/pantheon/utils/validation.py @@ -0,0 +1,294 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Argument validation functions""" + +from __future__ import annotations +from typing import Optional, Union, Literal + +from .typing import PathLike + + +def check_file_list(file_list: list[PathLike], n: Optional[int] = None) -> int: + """Checks list of filenames. + + Makes sure each filename in the file_list is a valid file on disk. + Optionally, checks the number of filenames meet expection. + + Args: + file_list: A list of filenames. + n: Expected number of filenames in file_list. + + Returns: + Number of filenames in the file_list. 
+ + Raises: + ValueError: Number of filenames in the file_list doesn't meet + expectation. + FileNotFoundError: One or multiple files in the file_list is not + found. + """ + + n_file = len(file_list) + if n_file == 0: + raise ValueError("Argument file_list is empty.") + if n and n_file != n: + raise ValueError(f"Number of files in the list is {n_file}. Expecting {n}.") + for f in file_list: + if not f.is_file(): + raise FileNotFoundError(f"File {f} is not found.") + + return n_file + + +#################### +# Conform argument # +#################### + + +def conform_sub_id(sub_id: Union[int, str], with_prefix: bool = False) -> str: + """Conforms Subject ID. + + Args: + sub_id: Subject ID + with_prefix: If true, the conformed Subject ID always starts + with 'sub-'. + + Returns: + Conformed Subject ID. + If input is a int, conform it as zero padded string (e.g., 001). + + Raises: + TypeError: Input sub_id is not int or string type. + """ + + if not isinstance(sub_id, (int, str)): + raise TypeError("Argument sub_id should be a int or str.") + if isinstance(sub_id, int): + sub_id = f"{int(sub_id):03d}" + if (sub_id.startswith("sub-")) and (not with_prefix): + sub_id = sub_id[4:] + if (not sub_id.startswith("sub-")) and with_prefix: + sub_id = f"sub-{sub_id}" + return sub_id + + +def conform_run_id(run_id: Union[int, str], with_prefix: bool = False) -> str: + """Conforms Subject ID. + + Args: + run_id: Run ID + with_prefix: If true, the conformed Run ID always starts + with 'run-'. + + Returns: + Conformed Run ID. + + Raises: + TypeError: Input run_id is not int or string type. 
+ """ + + if not isinstance(run_id, (int, str)): + raise TypeError("Argument run_id should be a int or str.") + run_id = str(run_id) + if (run_id.startswith("run-")) and (not with_prefix): + run_id = run_id[4:] + if (not run_id.startswith("run-")) and with_prefix: + run_id = f"run-{run_id}" + return run_id + + +def conform_task_id(task_id: str, with_prefix: bool = False) -> str: + """Conforms Task ID. + + Args: + task_id: Task ID + with_prefix: If true, the conformed Task ID always starts + with 'task-'. + + Returns: + Conformed Task ID. + + Raises: + TypeError: Input task_id is not a string. + """ + + if not isinstance(task_id, str): + raise TypeError("Argument task_id should be a string.") + if (task_id.startswith("task-")) and (not with_prefix): + task_id = task_id[5:] + if (not task_id.startswith("task-")) and with_prefix: + task_id = f"task-{task_id}" + return task_id + + +############################ +# Parse and check argument # +############################ + + +def parse_space(space: str, valid_list: list[str] = ["fsnative", "fsLR"]) -> str: + """Parses and validates argument 'space'. + + Args: + space: Spatial space. + valid_list: Valid values of space. + + Returns: + A valid space. + + Raises: + ValueError: space value is invalid. + """ + + if space not in valid_list: + raise ValueError(f"Invalid space: {space}. Valid: {', '.join(valid_list)}.") + return space + + +def parse_hemi( + hemi: str, + valid_list: list[str] = ["L", "R"], + structure_format: Literal["cifti", "gifti"] = "cifti", +) -> tuple[str, str]: + """Parses and validates argument 'hemi'. + + Args: + hemi: Brain hemisphere. + valid_list: Valid values of hemi. + structure_format: Surface structure name format. For example, + CORTEX_LEFT. Valid: cifti, gifti. + + Returns: + A tuple (hemi, structure), where hemi indicates left or right + hemisphere, and structure is the structure name of the surface. + For example, CORTEX_LEFT (cifti format). + + Raises: + ValueError: hemi value is invalid. 
+ """ + + if hemi not in valid_list: + raise ValueError(f"Invalid hemi: {hemi}. Valid: {', '.join(valid_list)}.") + struc_name = { + "L": {"cifti": "CORTEX_LEFT", "gifti": "CortexLeft"}, + "R": {"cifti": "CORTEX_RIGHT", "gifti": "CortexRight"}, + } + structure = struc_name[hemi][structure_format] + return hemi, structure + + +def parse_mesh_density( + mesh_den: str, valid_list: list[str] = ["fsnative", "164k", "59k", "32k"] +) -> str: + """Parses and validates argument 'mesh_den'. + + Args: + mesh_den: Surface mesh density. + valid_list: Valid values of mesh_den. + + Returns: + A valid mesh density string. + + Raises: + ValueError: mesh_den value is invalid. + """ + + if mesh_den not in valid_list: + raise ValueError(f"Invalid mesh_den: {mesh_den}. Valid: {', '.join(valid_list)}.") + return mesh_den + + +def parse_registration_method( + registration_method: str, valid_list: list[str] = ["FS", "MSMSulc"] +) -> str: + """Parses and validates argument 'registration_method'. + + Args: + registration_method: Spatial registration method. + valid_list: Valid values of registration_method. + + Returns: + A valid registration method string. + + Raises: + ValueError: registration_method value is invalid. + """ + + if registration_method not in valid_list: + raise ValueError( + f"Invalid registration_method: {registration_method}. Valid: {', '.join(valid_list)}." + ) + return registration_method + + +def parse_smoothing_fwhm( + smoothing_fwhm: Optional[list[int, float]] = None, remove_zero: bool = False +) -> Optional[list[float]]: + """Parses and validates argument 'smoothing_fwhm'. + + Args: + smoothing_fwhm: Spatial smoothing kernal size (FWHM, mm). + It could be a number, a list of numbers or None. + remove_zero: If true, remove 0 from the smoothing_fwhm list. + + Returns: + A list of valid smoothing FWHM value. It could be None if the + input is None. + + Raises: + ValueError: smoothing_fwhm value is invalid. 
+ """ + + if smoothing_fwhm is None: + return None + if isinstance(smoothing_fwhm, (int, float)): + smoothing_fwhm = [smoothing_fwhm] + elif isinstance(smoothing_fwhm, list): + for fwhm in smoothing_fwhm: + if not isinstance(fwhm, (int, float)): + raise ValueError("Elements in smoothing_fwhm should be a float or int.") + else: + raise ValueError("Argument smoothing_fwhm should be a number of a list of numbers.") + + conform_fwhm = [] + for fwhm in smoothing_fwhm: + # remove 0 if requested + if (fwhm != 0) or ((fwhm == 0) and (not remove_zero)): + conform_fwhm.append(float(fwhm)) + if len(conform_fwhm) == 0: + conform_fwhm = None + return conform_fwhm + + +def parse_roi_id(roi_id: str) -> tuple[str, str]: + """Parses and validates argument 'roi_id'. + + Args: + roi_id: ROI ID. It should be in the format like 'NAME-HEMI'. + For example, AG-L represents AG in left hemisphere. If '-L' + or '-R' is omitted, it represents bilateral ROI. + + Returns: + A tuple (ROI name, hemi), where ROI name indicates the name of + the ROI without hemisphere suffix, and hemi indicates left or + right brain hemisphere. If it's a bilateral ROI, hemi is LR. + Raises: + ValueError: roi_id format is incorrect. + """ + + parts = roi_id.split("-") + cond1 = (parts[-1] == "L") or (parts[-1] == "R") # ending with L or R + cond2 = len(parts) == 1 + cond3 = len(parts) > 2 + # Ensure roi_id ends with 'L', 'R' with exactly one '-'. Or it + # doesn't contain any '-' (indicates bilateral or midline ROI) + if (not (cond1 or cond2)) or cond3: + raise ValueError( + "Argument 'roi_id' should be a string with one and at most one '-'.\n" + "Before the dash is the ROI name and after dash is the hemisphere (L or R).\n" + "If this is a bilateral or midline ROI, the hemisphere should be omitted." 
+ ) + roi_name = parts[0] + hemi = parts[1] if len(parts) == 2 else "LR" + return roi_name, hemi diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..fd9033d --- /dev/null +++ b/setup.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from setuptools import find_packages, setup + +setup( + name="pantheon", + packages=find_packages(), + version="2022.7.0", + description="Pantheon is a series of functions and workflows aimed to facilitate analyzing fMRI data.", + author="Zhifang Ye", + license="MIT", +)