diff --git a/.gitignore b/.gitignore index 4b48089..0186e34 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,8 @@ var/ .installed.cfg *.egg Pipfile.lock +**/.ipynb_checkpoints +**crash* # PyInstaller # Usually these files are written by a python script from a template @@ -64,3 +66,4 @@ target/ *.xls *.html slices* +pyscript.m diff --git a/neuro_pypes/anat/utils.py b/neuro_pypes/anat/utils.py index fb0863b..4043154 100644 --- a/neuro_pypes/anat/utils.py +++ b/neuro_pypes/anat/utils.py @@ -30,9 +30,7 @@ def biasfield_correct(anat_filepath=traits.Undefined): n4.inputs.convergence_threshold = 1e-6 #n4.inputs.bspline_order = 5 n4.inputs.save_bias = True - n4.inputs.input_image = anat_filepath - return n4 diff --git a/neuro_pypes/fmri/nuisance.py b/neuro_pypes/fmri/nuisance.py index 31b248d..c926c18 100644 --- a/neuro_pypes/fmri/nuisance.py +++ b/neuro_pypes/fmri/nuisance.py @@ -208,7 +208,7 @@ def rest_noise_filter_wf(wf_name='rest_noise_removal'): ("in_file", "realigned_files"), ("motion_params", "realignment_parameters"), ("brain_mask", "mask_file"), - ß]), + ]), # calculte motion regressors (rest_noise_input, motion_regs, [("motion_params", "motion_params")]), diff --git a/neuro_pypes/preproc/slicetime.py b/neuro_pypes/preproc/slicetime.py index 3d0a2a5..2dd5b3c 100644 --- a/neuro_pypes/preproc/slicetime.py +++ b/neuro_pypes/preproc/slicetime.py @@ -306,8 +306,7 @@ def _pick_first(sequence): ] # the input and output nodes - stc_input = setup_node(IdentityInterface(fields=input_fields), - name="stc_input") + stc_input = setup_node(IdentityInterface(fields=input_fields), name="stc_input") stc_output = setup_node(IdentityInterface(fields=["timecorrected_files", "time_repetition", diff --git a/neuro_pypes/preproc/slicetime_params.py b/neuro_pypes/preproc/slicetime_params.py index d0e63cb..5d7dadf 100644 --- a/neuro_pypes/preproc/slicetime_params.py +++ b/neuro_pypes/preproc/slicetime_params.py @@ -203,7 +203,7 @@ def calculate_slice_order(n_slices, slice_mode): img = nib.load(in_file) times = get_nii_slice_times(img) if times is not None: - return order_from_times(times) + return list(order_from_times(times)) # read the slice mode code from the file if slice_mode == 'unknown': diff --git a/scripts/dual_regression.m b/scripts/ica_multiple_regression/dual_regression.m similarity index 100% rename from scripts/dual_regression.m rename to scripts/ica_multiple_regression/dual_regression.m diff --git a/scripts/rsn_multiple_regressions.py b/scripts/ica_multiple_regression/rsn_multiple_regressions.py similarity index 100% rename from scripts/rsn_multiple_regressions.py rename to scripts/ica_multiple_regression/rsn_multiple_regressions.py diff --git a/scripts/rsn_multiple_regressions_template.m b/scripts/ica_multiple_regression/rsn_multiple_regressions_template.m similarity index 100% rename from scripts/rsn_multiple_regressions_template.m rename to scripts/ica_multiple_regression/rsn_multiple_regressions_template.m diff --git a/scripts/rest_fmri_preprocessing/README.md b/scripts/rest_fmri_preprocessing/README.md new file mode 100644 index 0000000..b1304f5 --- /dev/null +++ b/scripts/rest_fmri_preprocessing/README.md @@ -0,0 +1,32 @@ + +Build the neurita/neuro_docker container: + +``` +git clone https://github.com/Neurita/neuro_ansible.git +``` + +Install the dependencies of neuro_ansible. Then: + +``` +cd neuro_ansible +make docker-run +``` +Exit from the container and delete it. 
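+For reference, one way to do that clean-up (the container name/ID below is a placeholder; check `docker ps -a` for whatever `make docker-run` actually created):
+
+```
+docker ps -a
+docker stop <container-name>
+docker rm <container-name>
+```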
+ +Run the container again with your options: +``` +export DATA_DIR=$HOME/projects/multimodal_test_data +export PYPES_DIR=$HOME/projects/neuro_pypes + +docker run -it -p 8888:8888 --name neuro -v $DATA_DIR:/data -v $PYPES_DIR:/root/projects/neuro_pypes neurita/neuro_docker:0.2 /bin/bash +``` + +Inside the container: +``` +pyenv activate neuro +pip install jupyter jupyterlab +``` + +``` +jupyter lab --ip 0.0.0.0 --no-browser --allow-root +``` diff --git a/scripts/rest_fmri_preprocessing/process.ipynb b/scripts/rest_fmri_preprocessing/process.ipynb new file mode 100644 index 0000000..c987441 --- /dev/null +++ b/scripts/rest_fmri_preprocessing/process.ipynb @@ -0,0 +1,1838 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/root/.pyenv/versions/3.6.7/lib/python3.6/importlib/_bootstrap.py:219: ImportWarning: can't resolve package from __spec__ or __package__, falling back on __name__ and __path__\n", + " return f(*args, **kwds)\n" + ] + } + ], + "source": [ + "import os\n", + "\n", + "from hansel import Crumb\n", + "from hansel.operations import joint_value_map, valuesmap_to_dict\n", + "import nipype.pipeline.engine as pe\n", + "from nipype.algorithms.misc import Gunzip\n", + "from nipype.interfaces import spm, fsl\n", + "from nipype.interfaces.utility import IdentityInterface, Function, Select\n", + "from nipype.interfaces.io import DataSink\n", + "from nipype.interfaces.ants import N4BiasFieldCorrection\n", + "from nipype.interfaces.base import traits\n", + "\n", + "from neuro_pypes.crumb import DataCrumb\n", + "from neuro_pypes.preproc.slicetime_params import STCParametersInterface\n", + "from neuro_pypes.interfaces.nilearn import math_img\n", + "from neuro_pypes.preproc import get_bounding_box\n", + "from neuro_pypes._utils import flatten_list\n", + "from neuro_pypes.utils import (\n", + " remove_ext,\n", + " joinstrings,\n", + " selectindex,\n", + " extend_trait_list\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "wf_name = 'spm_rest_preprocessing'\n", + "\n", + "#work_dir = os.path.expanduser(f'~/data/neuro_pypes/{wf_name}/')\n", + "work_dir = os.path.expanduser(f'/data/neuro_pypes/{wf_name}/')\n", + "\n", + "#input_dir = os.path.expanduser('~/projects/neuro/multimodal_test_data/raw')\n", + "input_dir = os.path.expanduser('/data/raw')\n", + "\n", + "output_dir = os.path.join(work_dir, 'out')\n", + "cache_dir = os.path.join(work_dir, 'wd')\n", + "\n", + "data_path = os.path.join(os.path.expanduser(input_dir), '{subject_id}', '{session}', '{image}')\n", + "data_crumb = Crumb(data_path, ignore_list=['.*'])\n", + "crumb_modalities = {\n", + " 'anat': [('image', 'anat_hc.nii.gz')],\n", + " 'rest': [('image', 'rest.nii.gz')]\n", + "}\n", + "\n", + "anat_voxel_sizes = [1, 1, 1]\n", + "\n", + "fmri_smoothing_kernel_fwhm = 8" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "wf = pe.Workflow(name=wf_name, base_dir=work_dir)\n", + "\n", + "# ------------------------------------------------------------------------------------------------\n", + "# DATA INPUT AND SINK\n", + "# ------------------------------------------------------------------------------------------------\n", + "datasource = pe.Node(\n", + " DataCrumb(crumb=data_crumb, templates=crumb_modalities, raise_on_empty=False),\n", + " name='selectfiles'\n", + ")\n", + "\n", + "datasink = 
pe.Node(\n", + " DataSink(parameterization=False, base_directory=output_dir, ),\n", + " name=\"datasink\"\n", + ")\n", + " \n", + "# basic file name substitutions for the datasink\n", + "undef_args = datasource.interface._infields\n", + "substitutions = [(name, \"\") for name in undef_args]\n", + "substitutions.append((\"__\", \"_\"))\n", + "\n", + "# datasink.inputs.substitutions = extend_trait_list(datasink.inputs.substitutions, substitutions)\n", + "\n", + "# Infosource - the information source that iterates over crumb values map from the filesystem\n", + "infosource = pe.Node(interface=IdentityInterface(fields=undef_args), name=\"infosrc\")\n", + "infosource.iterables = list(valuesmap_to_dict(joint_value_map(data_crumb, undef_args)).items())\n", + "infosource.synchronize = True\n", + "\n", + "# connect the input_wf to the datasink\n", + "joinpath = pe.Node(joinstrings(len(undef_args)), name='joinpath')\n", + "\n", + "# Connect the infosrc node to the datasink\n", + "input_joins = [(name, 'arg{}'.format(arg_no + 1)) for arg_no, name in enumerate(undef_args)]\n", + "\n", + "wf.connect([\n", + " (infosource, datasource, [(field, field) for field in undef_args]),\n", + " (datasource, joinpath, input_joins),\n", + " (joinpath, datasink, [(\"out\", \"container\")]),\n", + "])" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "# ------------------------------------------------------------------------------------------------\n", + "# ANAT\n", + "# ------------------------------------------------------------------------------------------------\n", + "\n", + "# T1 preprocessing nodes\n", + "\n", + "# ANTs N4 Bias field correction\n", + "# n4 = N4BiasFieldCorrection()\n", + "# n4.inputs.dimension = 3\n", + "# n4.inputs.bspline_fitting_distance = 300\n", + "# n4.inputs.shrink_factor = 3\n", + "# n4.inputs.n_iterations = [50, 50, 30, 20]\n", + "# n4.inputs.convergence_threshold = 1e-6\n", + "# n4.inputs.save_bias = True\n", + "# n4.inputs.input_image = traits.Undefined\n", + "# biascor = pe.Node(n4, name=\"bias_correction\")\n", + "\n", + "gunzip_anat = pe.Node(Gunzip(), name=\"gunzip_anat\")\n", + "\n", + "# SPM New Segment\n", + "spm_info = spm.Info()\n", + "priors_path = os.path.join(spm_info.path(), 'tpm', 'TPM.nii')\n", + "segment = spm.NewSegment()\n", + "tissue1 = ((priors_path, 1), 1, (True, True), (True, True))\n", + "tissue2 = ((priors_path, 2), 1, (True, True), (True, True))\n", + "tissue3 = ((priors_path, 3), 2, (True, True), (True, True))\n", + "tissue4 = ((priors_path, 4), 3, (True, True), (True, True))\n", + "tissue5 = ((priors_path, 5), 4, (True, False), (False, False))\n", + "tissue6 = ((priors_path, 6), 2, (False, False), (False, False))\n", + "segment.inputs.tissues = [tissue1, tissue2, tissue3, tissue4, tissue5, tissue6]\n", + "segment.inputs.channel_info = (0.0001, 60, (True, True))\n", + "segment.inputs.write_deformation_fields = [True, True]\n", + "segment.inputs.channel_files = traits.Undefined\n", + "segment = pe.Node(segment, name=\"new_segment\")\n", + "\n", + "# Apply deformations\n", + "normalize_anat = spm.Normalize12(jobtype='write')\n", + "normalize_anat.inputs.write_voxel_sizes = anat_voxel_sizes\n", + "normalize_anat.inputs.deformation_file = traits.Undefined\n", + "normalize_anat.inputs.image_to_align = traits.Undefined\n", + "normalize_anat.inputs.write_bounding_box = traits.Undefined\n", + "warp_anat = pe.Node(normalize_anat, name=\"warp_anat\")\n", + "\n", + "tpm_bbox = pe.Node(\n", + " 
Function(function=get_bounding_box, input_names=[\"in_file\"], output_names=[\"bbox\"]),\n", + " name=\"tpm_bbox\"\n", + ")\n", + "tpm_bbox.inputs.in_file = priors_path\n", + "\n", + "# calculate brain mask from tissue maps\n", + "tissues = pe.Node(\n", + " IdentityInterface(fields=[\"gm\", \"wm\", \"csf\"], mandatory_inputs=True),\n", + " name=\"tissues\"\n", + ")\n", + "brain_mask = pe.Node(\n", + " Function(\n", + " function=math_img, \n", + " input_names=[\"formula\", \"out_file\", \"gm\", \"wm\", \"csf\"], \n", + " output_names=[\"out_file\"],\n", + " imports=['from neuro_pypes.interfaces.nilearn import ni2file']),\n", + " name='brain_mask'\n", + ")\n", + "brain_mask.inputs.out_file = \"tissues_brain_mask.nii.gz\"\n", + "brain_mask.inputs.formula = \"np.abs(gm + wm + csf) > 0\"\n", + "\n", + "# Connect the nodes\n", + "wf.connect([\n", + " # input to biasfieldcorrection\n", + "# (datasource, biascor, [(\"anat\", \"input_image\")]),\n", + "\n", + " # new segment\n", + "# (biascor, gunzip_anat, [(\"output_image\", \"in_file\")]),\n", + " (datasource, gunzip_anat, [(\"anat\", \"in_file\")]),\n", + " (gunzip_anat, segment, [(\"out_file\", \"channel_files\")]),\n", + "\n", + " # Normalize12\n", + " (segment, warp_anat, [(\"forward_deformation_field\", \"deformation_file\")]),\n", + " (segment, warp_anat, [(\"bias_corrected_images\", \"apply_to_files\")]),\n", + " (tpm_bbox, warp_anat, [(\"bbox\", \"write_bounding_box\")]),\n", + "\n", + " # brain mask from tissues\n", + " (segment, tissues,[\n", + " ((\"native_class_images\", selectindex, 0), \"gm\"),\n", + " ((\"native_class_images\", selectindex, 1), \"wm\"),\n", + " ((\"native_class_images\", selectindex, 2), \"csf\"),\n", + " ]),\n", + "\n", + " (tissues, brain_mask, [(\"gm\", \"gm\"), (\"wm\", \"wm\"), (\"csf\", \"csf\"),]),\n", + "\n", + " # output\n", + " (warp_anat, datasink, [(\"normalized_files\", \"anat.@mni\")]),\n", + " (segment, datasink, [(\"modulated_class_images\", \"anat.tissues.warped\"),\n", + " (\"native_class_images\", \"anat.tissues.native\"),\n", + " (\"transformation_mat\", \"anat.transform.@linear\"),\n", + " (\"forward_deformation_field\", \"anat.transform.@forward\"),\n", + " (\"inverse_deformation_field\", \"anat.transform.@inverse\"),\n", + " (\"bias_corrected_images\", \"anat.@biascor\")]),\n", + " (brain_mask, datasink, [(\"out_file\", \"anat.@brain_mask\")]),\n", + "])\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def _sum_one_to_each(slice_order): # SPM starts count from 1\n", + " return [i+1 for i in slice_order]\n", + "\n", + "def _sum_one(num):\n", + " return num + 1\n", + "\n", + "def _pick_first(sequence):\n", + " return sequence[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from nipype.interfaces.nipy.preprocess import Trim, ComputeMask\n", + "\n", + "# ------------------------------------------------------------------------------------------------\n", + "# FMRI Clean\n", + "# ------------------------------------------------------------------------------------------------\n", + "\n", + "# rs-fMRI preprocessing nodes\n", + "trim = pe.Node(Trim(), name=\"trim\")\n", + "\n", + "# slice-timing correction\n", + "params = pe.Node(STCParametersInterface(), name='stc_params')\n", + "params.inputs.time_repetition = 2\n", + "params.inputs.slice_mode = 'alt_inc'\n", + "\n", + "gunzip = pe.Node(Gunzip(), name=\"gunzip\")\n", + "\n", + "stc = spm.SliceTiming()\n", + 
"stc.inputs.in_files = traits.Undefined\n", + "stc.inputs.out_prefix = 'stc'\n", + "slice_timing = pe.Node(stc, name='slice_timing')\n", + "\n", + "wf.connect([\n", + " # trim\n", + " (datasource, trim, [(\"rest\", \"in_file\")]),\n", + "\n", + " # slice time correction\n", + " (trim, params, [(\"out_file\", \"in_files\")]),\n", + " \n", + " # processing nodes\n", + " (params, gunzip, [((\"in_files\", _pick_first), \"in_file\")]),\n", + " (params, slice_timing, [\n", + " ((\"slice_order\", _sum_one_to_each), \"slice_order\"),\n", + " ((\"ref_slice\", _sum_one), \"ref_slice\"),\n", + " (\"num_slices\", \"num_slices\"),\n", + " (\"time_acquisition\", \"time_acquisition\"),\n", + " (\"time_repetition\", \"time_repetition\"),\n", + " ]),\n", + " \n", + " (gunzip, slice_timing, [(\"out_file\", \"in_files\")]),\n", + " \n", + "])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# ------------------------------------------------------------------------------------------------\n", + "# FMRI Warp, Align, Filtering, Smoothing\n", + "# ------------------------------------------------------------------------------------------------\n", + "from nipype.interfaces.nipy import SpaceTimeRealigner\n", + "from nipype.algorithms.confounds import TSNR\n", + "from nipype.algorithms.rapidart import ArtifactDetect\n", + "\n", + "from neuro_pypes.fmri.nuisance import rest_noise_filter_wf\n", + "from neuro_pypes.interfaces.nilearn import mean_img, smooth_img\n", + "\n", + "\n", + "realign = pe.Node(SpaceTimeRealigner(), name='realign')\n", + "\n", + "# average\n", + "average = pe.Node(\n", + " Function(\n", + " function=mean_img,\n", + " input_names=[\"in_file\"],\n", + " output_names=[\"out_file\"],\n", + " imports=['from neuro_pypes.interfaces.nilearn import ni2file']\n", + " ),\n", + " name='average_epi'\n", + ")\n", + "\n", + "mean_gunzip = pe.Node(Gunzip(), name=\"mean_gunzip\")\n", + "\n", + "# co-registration nodes\n", + "coreg = spm.Coregister()\n", + "coreg.inputs.cost_function = \"mi\"\n", + "coreg.inputs.jobtype = 'estwrite'\n", + "\n", + "coregister = pe.Node(coreg, name=\"coregister_fmri\")\n", + "brain_sel = pe.Node(Select(index=[0, 1, 2]), name=\"brain_sel\")\n", + "\n", + "# brain mask made with EPI\n", + "epi_mask = pe.Node(ComputeMask(), name='epi_mask')\n", + "\n", + "# brain mask made with the merge of the tissue segmentations\n", + "tissue_mask = pe.Node(fsl.MultiImageMaths(), name='tissue_mask')\n", + "tissue_mask.inputs.op_string = \"-add %s -add %s -abs -kernel gauss 4 -dilM -ero -kernel gauss 1 -dilM -bin\"\n", + "tissue_mask.inputs.out_file = \"tissue_brain_mask.nii.gz\"\n", + "\n", + "# select tissues\n", + "gm_select = pe.Node(Select(index=[0]), name=\"gm_sel\")\n", + "wmcsf_select = pe.Node(Select(index=[1, 2]), name=\"wmcsf_sel\")\n", + "\n", + "# noise filter\n", + "wm_select = pe.Node(Select(index=[1]), name=\"wm_sel\")\n", + "csf_select = pe.Node(Select(index=[2]), name=\"csf_sel\")\n", + "\n", + "\n", + "# anat to fMRI registration inputs\n", + "wf.connect([\n", + "# (biascorr, coregister), [(\"output_image\", \"source\")],\n", + " (datasource, coregister, [(\"anat\", \"source\")]),\n", + " (segment, brain_sel, [(\"native_class_images\", \"inlist\")]),\n", + "])\n", + "\n", + "\n", + "wf.connect([\n", + " # motion correction\n", + " (slice_timing, realign, [(\"timecorrected_files\", \"in_file\")]),\n", + "\n", + " # coregistration target\n", + " (realign, average, [(\"out_file\", \"in_file\")]),\n", + " (average, 
mean_gunzip, [(\"out_file\", \"in_file\")]),\n", + " (mean_gunzip, coregister, [(\"out_file\", \"target\")]),\n", + "\n", + " # epi brain mask\n", + " (average, epi_mask, [(\"out_file\", \"mean_volume\")]),\n", + "\n", + " # coregistration\n", + " (brain_sel, coregister, [((\"out\", flatten_list), \"apply_to_files\")]),\n", + "\n", + " # tissue brain mask\n", + " (coregister, gm_select, [(\"coregistered_files\", \"inlist\")]),\n", + " (coregister, wmcsf_select, [(\"coregistered_files\", \"inlist\")]),\n", + " (gm_select, tissue_mask, [((\"out\", flatten_list), \"in_file\")]),\n", + " (wmcsf_select, tissue_mask, [((\"out\", flatten_list), \"operand_files\")]),\n", + "\n", + " # nuisance correction\n", + " (coregister, wm_select, [(\"coregistered_files\", \"inlist\",)]),\n", + " (coregister, csf_select, [(\"coregistered_files\", \"inlist\",)]),\n", + "])\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# ------------------------------------------------------------------------------------------------\n", + "# FMRI Noise removal\n", + "# ------------------------------------------------------------------------------------------------\n", + "from neuro_pypes.preproc import motion_regressors, extract_noise_components, create_regressors\n", + "from neuro_pypes.utils import selectindex, rename\n", + "\n", + "# CompCor rsfMRI filters (at least compcor_csf should be True).\n", + "filters = {\n", + " 'compcor_csf': True,\n", + " 'compcor_wm': False,\n", + " 'gsr': False\n", + "}\n", + "\n", + "# Compute TSNR on realigned data regressing polynomial up to order 2\n", + "tsnr = pe.Node(TSNR(regress_poly=2), name='tsnr')\n", + "\n", + "# Use :class:`nipype.algorithms.rapidart` to determine which of the\n", + "# images in the functional series are outliers based on deviations in\n", + "# intensity or movement.\n", + "art = pe.Node(ArtifactDetect(), name=\"rapidart_artifacts\")\n", + "# # Threshold to use to detect motion-related outliers when composite motion is being used\n", + "art.inputs.use_differences = [True, False]\n", + "art.inputs.use_norm = True\n", + "art.inputs.zintensity_threshold = 2\n", + "art.inputs.use_norm = True\n", + "art.inputs.norm_threshold = 1\n", + "art.inputs.mask_type = 'file'\n", + "art.inputs.parameter_source = 'NiPy'\n", + "\n", + "# Compute motion regressors\n", + "motion_regs = pe.Node(\n", + " Function(\n", + " input_names=['motion_params', 'order', 'derivatives'],\n", + " output_names=['out_files'],\n", + " function=motion_regressors\n", + " ),\n", + " name='motion_regressors'\n", + ")\n", + "# motion regressors upto given order and derivative\n", + "# motion + d(motion)/dt + d2(motion)/dt2 (linear + quadratic)\n", + "motion_regs.inputs.order = 0\n", + "motion_regs.inputs.derivatives = 1\n", + "\n", + "# Create a filter to remove motion and art confounds\n", + "motart_pars = pe.Node(\n", + " Function(\n", + " input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],\n", + " output_names=['out_files'],\n", + " function=create_regressors\n", + " ),\n", + " name='motart_parameters'\n", + ")\n", + "# # number of polynomials to add to detrend\n", + "motart_pars.inputs.detrend_poly = 2\n", + "\n", + "motion_filter = pe.Node(\n", + " fsl.GLM(\n", + " out_f_name='F_mcart.nii.gz',\n", + " out_pf_name='pF_mcart.nii.gz',\n", + " demean=True\n", + " ),\n", + " name='motion_filter'\n", + ")\n", + "\n", + "# Noise confound regressors\n", + "compcor_pars = pe.Node(\n", + " Function(\n", + " 
input_names=['realigned_file', 'mask_file', 'num_components', 'extra_regressors'],\n", + " output_names=['components_file'],\n", + " function=extract_noise_components\n", + " ),\n", + " name='compcor_pars'\n", + ")\n", + "# Number of principal components to calculate when running CompCor. 5 or 6 is recommended.\n", + "compcor_pars.inputs.num_components = 6\n", + "\n", + "compcor_filter = pe.Node(\n", + " fsl.GLM(out_f_name='F.nii.gz', out_pf_name='pF.nii.gz', demean=True),\n", + " name='compcor_filter'\n", + ")\n", + "\n", + "# Global signal regression\n", + "gsr_pars = pe.Node(\n", + " Function(\n", + " input_names=['realigned_file', 'mask_file', 'num_components', 'extra_regressors'],\n", + " output_names=['components_file'],\n", + " function=extract_noise_components\n", + " ),\n", + " name='gsr_pars'\n", + ")\n", + "# Number of principal components to calculate when running Global Signal Regression. 1 is recommended.\n", + "gsr_pars.inputs.num_components = 1\n", + "\n", + "gsr_filter = pe.Node(\n", + " fsl.GLM(out_f_name='F_gsr.nii.gz', out_pf_name='pF_gsr.nii.gz', demean=True),\n", + " name='gsr_filter'\n", + ")\n", + "\n", + "wf.connect([\n", + " # tsnr\n", + " (realign, tsnr, [\n", + " (\"out_file\", \"in_file\"),\n", + " ]),\n", + "\n", + " # artifact detection\n", + " (tissue_mask, art, [(\"out_file\", \"mask_file\")]),\n", + " (realign, art, [\n", + " (\"out_file\", \"realigned_files\"),\n", + " (\"par_file\", \"realignment_parameters\")\n", + " ]),\n", + " \n", + " # calculate motion regressors\n", + " (realign, motion_regs, [\n", + " (\"par_file\", \"motion_params\")\n", + " ]),\n", + "\n", + " # create motion and confound regressors parameters file\n", + " (art, motart_pars, [\n", + " (\"norm_files\", \"comp_norm\"),\n", + " (\"outlier_files\", \"outliers\"),\n", + " ]),\n", + " (motion_regs, motart_pars, [\n", + " (\"out_files\", \"motion_params\")\n", + " ]),\n", + "\n", + " # motion filtering\n", + " (realign, motion_filter, [\n", + " (\"out_file\", \"in_file\"),\n", + " ((\"out_file\", rename, \"_filtermotart\"), \"out_res_name\"),\n", + " ]),\n", + " (motart_pars, motion_filter, [\n", + " ((\"out_files\", selectindex, 0), \"design\")\n", + " ]),\n", + "])\n", + "\n", + "wf.connect([\n", + " # output\n", + " (tsnr, datasink, [(\"tsnr_file\", \"rest.@tsnr\")]),\n", + " \n", + " (motart_pars, datasink, [(\"out_files\", \"rest.@motion_regressors\")]),\n", + " (motion_filter, datasink, [(\"out_res\", \"rest.@motion_corrected\")]),\n", + " (art, datasink, [\n", + " (\"displacement_files\", \"rest.artifact_stats.@displacement\"),\n", + " (\"intensity_files\", \"rest.artifact_stats.@intensity\"),\n", + " (\"norm_files\", \"rest.artifact_stats.@norm\"),\n", + " (\"outlier_files\", \"rest.artifact_stats.@outliers\"),\n", + " (\"plot_files\", \"rest.artifact_stats.@plots\"),\n", + " (\"statistic_files\", \"rest.artifact_stats.@stats\"),\n", + " ]),\n", + "])\n", + "\n", + "\n", + "last_filter = motion_filter\n", + "\n", + "# compcor filter\n", + "if filters['compcor_csf'] or filters['compcor_wm']:\n", + " wf.connect([\n", + " # calculate compcor regressor and parameters file\n", + " (motart_pars, compcor_pars, [((\"out_files\", selectindex, 0), \"extra_regressors\"), ]),\n", + " (motion_filter, compcor_pars, [(\"out_res\", \"realigned_file\"), ]),\n", + "\n", + " # the compcor filter\n", + " (motion_filter, compcor_filter, [(\"out_res\", \"in_file\"),\n", + " ((\"out_res\", rename, \"_cleaned\"), \"out_res_name\"),\n", + " ]),\n", + " (compcor_pars, compcor_filter, 
[(\"components_file\", \"design\")]),\n", + " (tissue_mask, compcor_filter, [(\"out_file\", \"mask\")]),\n", + "\n", + " # output\n", + " (compcor_pars, datasink, [(\"components_file\", \"rest.@compcor_regressors\")]),\n", + " ])\n", + " last_filter = compcor_filter\n", + "\n", + "# global signal regression\n", + "if filters['gsr']:\n", + " wf.connect([\n", + " # calculate gsr regressors parameters file\n", + " (last_filter, gsr_pars, [(\"out_res\", \"realigned_file\")]),\n", + " (tissue_mask, gsr_pars, [(\"out_file\", \"mask_file\")]),\n", + "\n", + " # the output file name\n", + " (tissue_mask, gsr_filter, [(\"out_file\", \"mask\")]),\n", + " (last_filter, gsr_filter, [\n", + " (\"out_res\", \"in_file\"),\n", + " ((\"out_res\", rename, \"_gsr\"), \"out_res_name\"),\n", + " ]),\n", + " (gsr_pars, gsr_filter, [(\"components_file\", \"design\")]),\n", + "\n", + " # output\n", + " (gsr_pars, datasink, [(\"components_file\", \"rest.@gsr_regressors\")]),\n", + " ])\n", + " last_filter = gsr_filter\n", + "\n", + "# connect the final nuisance correction output node\n", + "wf.connect([(last_filter, datasink, [(\"out_res\", \"rest.@nuis_corrected\")]), ])\n", + "\n", + "if filters['compcor_csf'] and filters['compcor_wm']:\n", + " mask_merge = setup_node(Merge(2), name=\"mask_merge\")\n", + " wf.connect([\n", + " ## the mask for the compcor filter\n", + " (wm_select, mask_merge, [((\"out\", flatten_list), \"in1\")]),\n", + " (csf_select, mask_merge, [((\"out\", flatten_list), \"in2\")]),\n", + " (mask_merge, compcor_pars, [(\"out\", \"mask_file\")]),\n", + " ])\n", + "\n", + "elif filters['compcor_csf']:\n", + " wf.connect([\n", + " ## the mask for the compcor filter\n", + " (csf_select, compcor_pars, [((\"out\", flatten_list), \"mask_file\")]),\n", + " ])\n", + "\n", + "elif filters['compcor_wm']:\n", + " wf.connect([\n", + " ## the mask for the compcor filter\n", + " (wm_select, compcor_pars, [((\"out\", flatten_list), \"mask_file\")]),\n", + " ])\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from neuro_pypes.fmri.filter import bandpass_filter\n", + "from neuro_pypes.interfaces.nilearn import smooth_img\n", + "\n", + "# bandpass filtering\n", + "bandpass = pe.Node(\n", + " Function(\n", + " input_names=['files', 'lowpass_freq', 'highpass_freq', 'tr'],\n", + " output_names=['out_files'],\n", + " function=bandpass_filter\n", + " ),\n", + " name='bandpass'\n", + ")\n", + "bandpass.inputs.lowpass_freq = 0.1\n", + "bandpass.inputs.highpass_freq = 0.01\n", + "\n", + "# smooth\n", + "smooth = pe.Node(\n", + " Function(\n", + " function=smooth_img,\n", + " input_names=[\"in_file\", \"fwhm\"],\n", + " output_names=[\"out_file\"],\n", + " imports=['from neuro_pypes.interfaces.nilearn import ni2file']\n", + " ),\n", + " name=\"smooth\"\n", + ")\n", + "smooth.inputs.fwhm = fmri_smoothing_kernel_fwhm\n", + "smooth.inputs.out_file = \"smooth_std_{}.nii.gz\".format(wf_name)\n", + "\n", + "\n", + "wf.connect([\n", + " # temporal filtering\n", + " (last_filter, bandpass, [(\"out_res\", \"files\")]),\n", + "\n", + " # (realign, bandpass, [(\"out_file\", \"files\")]),\n", + " (params, bandpass, [(\"time_repetition\", \"tr\")]),\n", + " (bandpass, smooth, [(\"out_files\", \"in_file\")]),\n", + "\n", + " # output\n", + " (epi_mask, datasink, [(\"brain_mask\", \"rest.@epi_brain_mask\")]),\n", + " (tissue_mask, datasink, [(\"out_file\", \"rest.@tissues_brain_mask\")]),\n", + " (realign, datasink, [\n", + " (\"out_file\", \"rest.@realigned\"),\n", + 
" (\"par_file\", \"rest.@motion_params\"),\n", + " ]),\n", + " (coregister, datasink, [\n", + " (\"coregistered_files\", \"rest.@tissues\"),\n", + " (\"coregistered_source\", \"rest.@anat\"),\n", + " ]),\n", + " (average, datasink, [(\"out_file\", \"rest.@avg_epi\")]),\n", + " (bandpass, datasink, [(\"out_files\", \"rest.@time_filtered\")]),\n", + " (smooth, datasink, [(\"out_file\", \"rest.@smooth\")]),\n", + "])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,116 nipype.workflow INFO:\n", + "\t Workflow spm_rest_preprocessing settings: ['check', 'execution', 'logging', 'monitoring']\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:Workflow spm_rest_preprocessing settings: ['check', 'execution', 'logging', 'monitoring']\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,207 nipype.workflow INFO:\n", + "\t Running serially.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:Running serially.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,210 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.selectfiles\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/selectfiles\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.selectfiles\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/selectfiles\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,215 nipype.workflow INFO:\n", + "\t [Node] Running \"selectfiles\" (\"neuro_pypes.crumb.DataCrumb\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"selectfiles\" (\"neuro_pypes.crumb.DataCrumb\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,223 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.selectfiles\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/utils.py:307: DeprecationWarning: use \"HasTraits.trait_set\" instead\n", + " result.outputs.set(**modify_paths(tosave, relative=True, basedir=cwd))\n", + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.selectfiles\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,224 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.trim\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/trim\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.trim\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/trim\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,228 nipype.workflow INFO:\n", + "\t [Node] Cached \"spm_rest_preprocessing.trim\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + 
"text": [ + "INFO:nipype.workflow:[Node] Cached \"spm_rest_preprocessing.trim\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,229 nipype.workflow INFO:\n", + "\t [Node] \"spm_rest_preprocessing.trim\" found cached.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] \"spm_rest_preprocessing.trim\" found cached.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,230 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.stc_params\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/stc_params\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.stc_params\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/stc_params\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,236 nipype.workflow INFO:\n", + "\t [Node] Running \"stc_params\" (\"neuro_pypes.preproc.slicetime_params.STCParametersInterface\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"stc_params\" (\"neuro_pypes.preproc.slicetime_params.STCParametersInterface\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,246 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.stc_params\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.stc_params\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,247 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.gunzip\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/gunzip\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.gunzip\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/gunzip\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,251 nipype.workflow INFO:\n", + "\t [Node] Running \"gunzip\" (\"nipype.algorithms.misc.Gunzip\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"gunzip\" (\"nipype.algorithms.misc.Gunzip\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,901 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.gunzip\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.gunzip\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,902 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.slice_timing\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/slice_timing\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.slice_timing\" in 
\"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/slice_timing\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:16,910 nipype.workflow INFO:\n", + "\t [Node] Running \"slice_timing\" (\"nipype.interfaces.spm.preprocess.SliceTiming\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"slice_timing\" (\"nipype.interfaces.spm.preprocess.SliceTiming\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:45,690 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.slice_timing\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.slice_timing\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:45,693 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.realign\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/realign\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.realign\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/realign\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:08:45,702 nipype.workflow INFO:\n", + "\t [Node] Running \"realign\" (\"nipype.interfaces.nipy.preprocess.SpaceTimeRealigner\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"realign\" (\"nipype.interfaces.nipy.preprocess.SpaceTimeRealigner\")\n", + "/root/.pyenv/versions/3.6.7/lib/python3.6/importlib/_bootstrap.py:219: ImportWarning: can't resolve package from __spec__ or __package__, falling back on __name__ and __path__\n", + " return f(*args, **kwds)\n", + "/root/.pyenv/versions/3.6.7/lib/python3.6/importlib/_bootstrap.py:219: ImportWarning: can't resolve package from __spec__ or __package__, falling back on __name__ and __path__\n", + " return f(*args, **kwds)\n", + "/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipy/algorithms/registration/groupwise_registration.py:481: UserWarning: Minimization failed\n", + " warnings.warn('Minimization failed')\n", + "/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipy/io/files.py:145: FutureWarning: Default `strict` currently False; this will change to True in a future version of nipy\n", + " ni_img = nipy2nifti(img, data_dtype = io_dtype)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:09,377 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.realign\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/utils.py:307: DeprecationWarning: use \"HasTraits.trait_set\" instead\n", + " result.outputs.set(**modify_paths(tosave, relative=True, basedir=cwd))\n", + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.realign\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:09,379 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.motion_regressors\" in 
\"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/motion_regressors\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.motion_regressors\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/motion_regressors\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:09,383 nipype.workflow INFO:\n", + "\t [Node] Running \"motion_regressors\" (\"nipype.interfaces.utility.wrappers.Function\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"motion_regressors\" (\"nipype.interfaces.utility.wrappers.Function\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:09,391 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.motion_regressors\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.motion_regressors\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:09,393 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.tsnr\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/tsnr\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.tsnr\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/tsnr\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:09,398 nipype.workflow INFO:\n", + "\t [Node] Running \"tsnr\" (\"nipype.algorithms.confounds.TSNR\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"tsnr\" (\"nipype.algorithms.confounds.TSNR\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:28,321 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.tsnr\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.tsnr\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:28,322 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.average_epi\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/average_epi\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.average_epi\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/average_epi\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:28,326 nipype.workflow INFO:\n", + "\t [Node] Running \"average_epi\" (\"nipype.interfaces.utility.wrappers.Function\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"average_epi\" (\"nipype.interfaces.utility.wrappers.Function\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:30,931 nipype.workflow INFO:\n", + "\t [Node] Finished 
\"spm_rest_preprocessing.average_epi\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.average_epi\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:30,932 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.epi_mask\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/epi_mask\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.epi_mask\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/epi_mask\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:30,935 nipype.workflow INFO:\n", + "\t [Node] Running \"epi_mask\" (\"nipype.interfaces.nipy.preprocess.ComputeMask\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"epi_mask\" (\"nipype.interfaces.nipy.preprocess.ComputeMask\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:30,972 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.epi_mask\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.epi_mask\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:30,973 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.mean_gunzip\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/mean_gunzip\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.mean_gunzip\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/mean_gunzip\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:30,977 nipype.workflow INFO:\n", + "\t [Node] Running \"mean_gunzip\" (\"nipype.algorithms.misc.Gunzip\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"mean_gunzip\" (\"nipype.algorithms.misc.Gunzip\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:30,992 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.mean_gunzip\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.mean_gunzip\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:30,993 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.gunzip_anat\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/gunzip_anat\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.gunzip_anat\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/gunzip_anat\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:30,997 nipype.workflow INFO:\n", + "\t [Node] Cached 
\"spm_rest_preprocessing.gunzip_anat\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Cached \"spm_rest_preprocessing.gunzip_anat\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:30,999 nipype.workflow INFO:\n", + "\t [Node] \"spm_rest_preprocessing.gunzip_anat\" found cached.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] \"spm_rest_preprocessing.gunzip_anat\" found cached.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,0 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.new_segment\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/new_segment\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.new_segment\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/new_segment\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,8 nipype.workflow INFO:\n", + "\t [Node] Cached \"spm_rest_preprocessing.new_segment\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Cached \"spm_rest_preprocessing.new_segment\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,10 nipype.workflow INFO:\n", + "\t [Node] \"spm_rest_preprocessing.new_segment\" found cached.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] \"spm_rest_preprocessing.new_segment\" found cached.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,11 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.brain_mask\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/brain_mask\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.brain_mask\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/brain_mask\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,19 nipype.workflow INFO:\n", + "\t [Node] Cached \"spm_rest_preprocessing.brain_mask\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Cached \"spm_rest_preprocessing.brain_mask\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,21 nipype.workflow INFO:\n", + "\t [Node] \"spm_rest_preprocessing.brain_mask\" found cached.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] \"spm_rest_preprocessing.brain_mask\" found cached.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,23 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.brain_sel\" in 
\"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/brain_sel\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.brain_sel\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/brain_sel\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,31 nipype.workflow INFO:\n", + "\t [Node] Cached \"spm_rest_preprocessing.brain_sel\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Cached \"spm_rest_preprocessing.brain_sel\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,33 nipype.workflow INFO:\n", + "\t [Node] \"spm_rest_preprocessing.brain_sel\" found cached.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] \"spm_rest_preprocessing.brain_sel\" found cached.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,34 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.coregister_fmri\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/coregister_fmri\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.coregister_fmri\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/coregister_fmri\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,38 nipype.workflow ERROR:\n", + "\t Node coregister_fmri.a3 failed to run on host 3641d5e749ab.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "ERROR:nipype.workflow:Node coregister_fmri.a3 failed to run on host 3641d5e749ab.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,45 nipype.workflow ERROR:\n", + "\t Saving crash info to /root/projects/neuro_pypes/scripts/rest_fmri_preprocessing/crash-20181202-211231-root-coregister_fmri.a3-21e781ae-5010-4f3d-84e4-c5c9d21cf60a.pklz\n", + "Traceback (most recent call last):\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/plugins/linear.py\", line 44, in run\n", + " node.run(updatehash=updatehash)\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 408, in run\n", + " cached, updated = self.is_cached()\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 294, in is_cached\n", + " hashed_inputs, hashvalue = self._get_hashval()\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 488, in _get_hashval\n", + " self._get_inputs()\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 531, in _get_inputs\n", + " self.set_input(key, deepcopy(output_value))\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 276, in set_input\n", + " setattr(self.inputs, parameter, deepcopy(val))\n", + " File 
\"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/interfaces/base/traits_extension.py\", line 341, in validate\n", + " value = super(MultiObject, self).validate(object, name, newvalue)\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/traits/trait_types.py\", line 2336, in validate\n", + " return TraitListObject( self, object, name, value )\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/traits/trait_handlers.py\", line 2313, in __init__\n", + " raise excp\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/traits/trait_handlers.py\", line 2305, in __init__\n", + " value = [ validate( object, name, val ) for val in value ]\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/traits/trait_handlers.py\", line 2305, in \n", + " value = [ validate( object, name, val ) for val in value ]\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/interfaces/base/traits_extension.py\", line 260, in validate\n", + " validated_value, ', '.join(_exts)))\n", + "traits.trait_errors.TraitError: /data/raw/subject_4/session_0/anat_hc.nii.gz is not included in allowed types: .img, .nii, .hdr\n", + "Error setting node input:\n", + "Node: coregister_fmri\n", + "input: source\n", + "results_file: /data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/selectfiles/result_selectfiles.pklz\n", + "value: /data/raw/subject_4/session_0/anat_hc.nii.gz\n", + "\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "ERROR:nipype.workflow:Saving crash info to /root/projects/neuro_pypes/scripts/rest_fmri_preprocessing/crash-20181202-211231-root-coregister_fmri.a3-21e781ae-5010-4f3d-84e4-c5c9d21cf60a.pklz\n", + "Traceback (most recent call last):\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/plugins/linear.py\", line 44, in run\n", + " node.run(updatehash=updatehash)\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 408, in run\n", + " cached, updated = self.is_cached()\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 294, in is_cached\n", + " hashed_inputs, hashvalue = self._get_hashval()\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 488, in _get_hashval\n", + " self._get_inputs()\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 531, in _get_inputs\n", + " self.set_input(key, deepcopy(output_value))\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 276, in set_input\n", + " setattr(self.inputs, parameter, deepcopy(val))\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/interfaces/base/traits_extension.py\", line 341, in validate\n", + " value = super(MultiObject, self).validate(object, name, newvalue)\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/traits/trait_types.py\", line 2336, in validate\n", + " return TraitListObject( self, object, name, value )\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/traits/trait_handlers.py\", line 2313, in __init__\n", + 
" raise excp\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/traits/trait_handlers.py\", line 2305, in __init__\n", + " value = [ validate( object, name, val ) for val in value ]\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/traits/trait_handlers.py\", line 2305, in \n", + " value = [ validate( object, name, val ) for val in value ]\n", + " File \"/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipype/interfaces/base/traits_extension.py\", line 260, in validate\n", + " validated_value, ', '.join(_exts)))\n", + "traits.trait_errors.TraitError: /data/raw/subject_4/session_0/anat_hc.nii.gz is not included in allowed types: .img, .nii, .hdr\n", + "Error setting node input:\n", + "Node: coregister_fmri\n", + "input: source\n", + "results_file: /data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/selectfiles/result_selectfiles.pklz\n", + "value: /data/raw/subject_4/session_0/anat_hc.nii.gz\n", + "\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,48 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.joinpath\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/joinpath\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.joinpath\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_4/joinpath\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,53 nipype.workflow INFO:\n", + "\t [Node] Cached \"spm_rest_preprocessing.joinpath\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Cached \"spm_rest_preprocessing.joinpath\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,55 nipype.workflow INFO:\n", + "\t [Node] \"spm_rest_preprocessing.joinpath\" found cached.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] \"spm_rest_preprocessing.joinpath\" found cached.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,57 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.selectfiles\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/selectfiles\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.selectfiles\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/selectfiles\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,64 nipype.workflow INFO:\n", + "\t [Node] Running \"selectfiles\" (\"neuro_pypes.crumb.DataCrumb\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"selectfiles\" (\"neuro_pypes.crumb.DataCrumb\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,72 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.selectfiles\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", 
+ "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.selectfiles\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,74 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.trim\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/trim\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.trim\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/trim\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,79 nipype.workflow INFO:\n", + "\t [Node] Cached \"spm_rest_preprocessing.trim\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Cached \"spm_rest_preprocessing.trim\" - collecting precomputed outputs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,80 nipype.workflow INFO:\n", + "\t [Node] \"spm_rest_preprocessing.trim\" found cached.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] \"spm_rest_preprocessing.trim\" found cached.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,82 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.stc_params\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/stc_params\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.stc_params\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/stc_params\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,89 nipype.workflow INFO:\n", + "\t [Node] Running \"stc_params\" (\"neuro_pypes.preproc.slicetime_params.STCParametersInterface\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"stc_params\" (\"neuro_pypes.preproc.slicetime_params.STCParametersInterface\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,101 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.stc_params\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.stc_params\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,103 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.gunzip\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/gunzip\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.gunzip\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/gunzip\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,109 nipype.workflow INFO:\n", + "\t [Node] Running \"gunzip\" (\"nipype.algorithms.misc.Gunzip\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": 
[ + "INFO:nipype.workflow:[Node] Running \"gunzip\" (\"nipype.algorithms.misc.Gunzip\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,851 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.gunzip\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.gunzip\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,852 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.slice_timing\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/slice_timing\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.slice_timing\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/slice_timing\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:31,859 nipype.workflow INFO:\n", + "\t [Node] Running \"slice_timing\" (\"nipype.interfaces.spm.preprocess.SliceTiming\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"slice_timing\" (\"nipype.interfaces.spm.preprocess.SliceTiming\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:59,946 nipype.workflow INFO:\n", + "\t [Node] Finished \"spm_rest_preprocessing.slice_timing\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Finished \"spm_rest_preprocessing.slice_timing\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:59,948 nipype.workflow INFO:\n", + "\t [Node] Setting-up \"spm_rest_preprocessing.realign\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/realign\".\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Setting-up \"spm_rest_preprocessing.realign\" in \"/data/neuro_pypes/spm_rest_preprocessing/spm_rest_preprocessing/_session_session_0_subject_id_subject_3/realign\".\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "181202-21:12:59,954 nipype.workflow INFO:\n", + "\t [Node] Running \"realign\" (\"nipype.interfaces.nipy.preprocess.SpaceTimeRealigner\")\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:nipype.workflow:[Node] Running \"realign\" (\"nipype.interfaces.nipy.preprocess.SpaceTimeRealigner\")\n", + "/root/.pyenv/versions/3.6.7/envs/neuro/lib/python3.6/site-packages/nipy/algorithms/registration/groupwise_registration.py:481: UserWarning: Minimization failed\n", + " warnings.warn('Minimization failed')\n" + ] + } + ], + "source": [ + "n_cpus = 1\n", + "\n", + "if n_cpus > 1:\n", + " wf.run(plugin=plugin, plugin_args={\"n_procs\": n_cpus})\n", + "else:\n", + " wf.run(plugin=None)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + 
"pygments_lexer": "ipython3", + "version": "3.6.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/scripts/rest_fmri_preprocessing/process.py b/scripts/rest_fmri_preprocessing/process.py new file mode 100644 index 0000000..fe52432 --- /dev/null +++ b/scripts/rest_fmri_preprocessing/process.py @@ -0,0 +1,617 @@ +#!/usr/bin/env python +# coding: utf-8 + +# In[1]: + + +import os + +from hansel import Crumb +from hansel.operations import joint_value_map, valuesmap_to_dict +import nipype.pipeline.engine as pe +from nipype.algorithms.misc import Gunzip +from nipype.interfaces import spm, fsl +from nipype.interfaces.utility import IdentityInterface, Function, Select +from nipype.interfaces.io import DataSink +from nipype.interfaces.ants import N4BiasFieldCorrection +from nipype.interfaces.base import traits + +from neuro_pypes.crumb import DataCrumb +from neuro_pypes.preproc.slicetime_params import STCParametersInterface +from neuro_pypes.interfaces.nilearn import math_img +from neuro_pypes.preproc import get_bounding_box +from neuro_pypes._utils import flatten_list +from neuro_pypes.utils import ( + remove_ext, + joinstrings, + selectindex, + extend_trait_list +) + + +wf_name = 'spm_rest_preprocessing' + +#work_dir = os.path.expanduser(f'~/data/neuro_pypes/{wf_name}/') +work_dir = os.path.expanduser(f'/data/neuro_pypes/{wf_name}/') + +#input_dir = os.path.expanduser('~/projects/neuro/multimodal_test_data/raw') +input_dir = os.path.expanduser('/data/raw') + +output_dir = os.path.join(work_dir, 'out') +cache_dir = os.path.join(work_dir, 'wd') + +data_path = os.path.join(os.path.expanduser(input_dir), '{subject_id}', '{session}', '{image}') +data_crumb = Crumb(data_path, ignore_list=['.*']) +crumb_modalities = { + 'anat': [('image', 'anat_hc.nii.gz')], + 'rest': [('image', 'rest.nii.gz')] +} + +anat_voxel_sizes = [1, 1, 1] + +fmri_smoothing_kernel_fwhm = 8 + + +wf = pe.Workflow(name=wf_name, base_dir=work_dir) + +# ------------------------------------------------------------------------------------------------ +# DATA INPUT AND SINK +# ------------------------------------------------------------------------------------------------ +datasource = pe.Node( + DataCrumb(crumb=data_crumb, templates=crumb_modalities, raise_on_empty=False), + name='selectfiles' +) + +datasink = pe.Node( + DataSink(parameterization=False, base_directory=output_dir, ), + name="datasink" +) + +# basic file name substitutions for the datasink +undef_args = datasource.interface._infields +substitutions = [(name, "") for name in undef_args] +substitutions.append(("__", "_")) + +# datasink.inputs.substitutions = extend_trait_list(datasink.inputs.substitutions, substitutions) + +# Infosource - the information source that iterates over crumb values map from the filesystem +infosource = pe.Node(interface=IdentityInterface(fields=undef_args), name="infosrc") +infosource.iterables = list(valuesmap_to_dict(joint_value_map(data_crumb, undef_args)).items()) +infosource.synchronize = True + +# connect the input_wf to the datasink +joinpath = pe.Node(joinstrings(len(undef_args)), name='joinpath') + +# Connect the infosrc node to the datasink +input_joins = [(name, 'arg{}'.format(arg_no + 1)) for arg_no, name in enumerate(undef_args)] + +wf.connect([ + (infosource, datasource, [(field, field) for field in undef_args]), + (datasource, joinpath, input_joins), + (joinpath, datasink, [("out", "container")]), +]) + + +# ------------------------------------------------------------------------------------------------ +# ANAT +# 
------------------------------------------------------------------------------------------------ + +# T1 preprocessing nodes + +# ANTs N4 Bias field correction +# n4 = N4BiasFieldCorrection() +# n4.inputs.dimension = 3 +# n4.inputs.bspline_fitting_distance = 300 +# n4.inputs.shrink_factor = 3 +# n4.inputs.n_iterations = [50, 50, 30, 20] +# n4.inputs.convergence_threshold = 1e-6 +# n4.inputs.save_bias = True +# n4.inputs.input_image = traits.Undefined +# biascor = pe.Node(n4, name="bias_correction") + +gunzip_anat = pe.Node(Gunzip(), name="gunzip_anat") + +# SPM New Segment +spm_info = spm.Info() +priors_path = os.path.join(spm_info.path(), 'tpm', 'TPM.nii') +segment = spm.NewSegment() +tissue1 = ((priors_path, 1), 1, (True, True), (True, True)) +tissue2 = ((priors_path, 2), 1, (True, True), (True, True)) +tissue3 = ((priors_path, 3), 2, (True, True), (True, True)) +tissue4 = ((priors_path, 4), 3, (True, True), (True, True)) +tissue5 = ((priors_path, 5), 4, (True, False), (False, False)) +tissue6 = ((priors_path, 6), 2, (False, False), (False, False)) +segment.inputs.tissues = [tissue1, tissue2, tissue3, tissue4, tissue5, tissue6] +segment.inputs.channel_info = (0.0001, 60, (True, True)) +segment.inputs.write_deformation_fields = [True, True] +segment.inputs.channel_files = traits.Undefined +segment = pe.Node(segment, name="new_segment") + +# Apply deformations +normalize_anat = spm.Normalize12(jobtype='write') +normalize_anat.inputs.write_voxel_sizes = anat_voxel_sizes +normalize_anat.inputs.deformation_file = traits.Undefined +normalize_anat.inputs.image_to_align = traits.Undefined +normalize_anat.inputs.write_bounding_box = traits.Undefined +warp_anat = pe.Node(normalize_anat, name="warp_anat") + +tpm_bbox = pe.Node( + Function(function=get_bounding_box, input_names=["in_file"], output_names=["bbox"]), + name="tpm_bbox" +) +tpm_bbox.inputs.in_file = priors_path + +# calculate brain mask from tissue maps +tissues = pe.Node( + IdentityInterface(fields=["gm", "wm", "csf"], mandatory_inputs=True), + name="tissues" +) +brain_mask = pe.Node( + Function( + function=math_img, + input_names=["formula", "out_file", "gm", "wm", "csf"], + output_names=["out_file"], + imports=['from neuro_pypes.interfaces.nilearn import ni2file']), + name='brain_mask' +) +brain_mask.inputs.out_file = "tissues_brain_mask.nii.gz" +brain_mask.inputs.formula = "np.abs(gm + wm + csf) > 0" + +# Connect the nodes +wf.connect([ + # input to biasfieldcorrection +# (datasource, biascor, [("anat", "input_image")]), + + # new segment +# (biascor, gunzip_anat, [("output_image", "in_file")]), + (datasource, gunzip_anat, [("anat", "in_file")]), + (gunzip_anat, segment, [("out_file", "channel_files")]), + + # Normalize12 + (segment, warp_anat, [("forward_deformation_field", "deformation_file")]), + (segment, warp_anat, [("bias_corrected_images", "apply_to_files")]), + (tpm_bbox, warp_anat, [("bbox", "write_bounding_box")]), + + # brain mask from tissues + (segment, tissues,[ + (("native_class_images", selectindex, 0), "gm"), + (("native_class_images", selectindex, 1), "wm"), + (("native_class_images", selectindex, 2), "csf"), + ]), + + (tissues, brain_mask, [("gm", "gm"), ("wm", "wm"), ("csf", "csf"),]), + + # output + (warp_anat, datasink, [("normalized_files", "anat.@mni")]), + (segment, datasink, [("modulated_class_images", "anat.tissues.warped"), + ("native_class_images", "anat.tissues.native"), + ("transformation_mat", "anat.transform.@linear"), + ("forward_deformation_field", "anat.transform.@forward"), + 
("inverse_deformation_field", "anat.transform.@inverse"), + ("bias_corrected_images", "anat.@biascor")]), + (brain_mask, datasink, [("out_file", "anat.@brain_mask")]), +]) + + + +def _sum_one_to_each(slice_order): # SPM starts count from 1 + return [i+1 for i in slice_order] + +def _sum_one(num): + return num + 1 + +def _pick_first(sequence): + return sequence[0] + + +from nipype.interfaces.nipy.preprocess import Trim, ComputeMask + +# ------------------------------------------------------------------------------------------------ +# FMRI Clean +# ------------------------------------------------------------------------------------------------ + +# rs-fMRI preprocessing nodes +trim = pe.Node(Trim(), name="trim") + +# slice-timing correction +params = pe.Node(STCParametersInterface(), name='stc_params') +params.inputs.time_repetition = 2 +params.inputs.slice_mode = 'alt_inc' + +gunzip = pe.Node(Gunzip(), name="gunzip") + +stc = spm.SliceTiming() +stc.inputs.in_files = traits.Undefined +stc.inputs.out_prefix = 'stc' +slice_timing = pe.Node(stc, name='slice_timing') + +wf.connect([ + # trim + (datasource, trim, [("rest", "in_file")]), + + # slice time correction + (trim, params, [("out_file", "in_files")]), + + # processing nodes + (params, gunzip, [(("in_files", _pick_first), "in_file")]), + (params, slice_timing, [ + (("slice_order", _sum_one_to_each), "slice_order"), + (("ref_slice", _sum_one), "ref_slice"), + ("num_slices", "num_slices"), + ("time_acquisition", "time_acquisition"), + ("time_repetition", "time_repetition"), + ]), + + (gunzip, slice_timing, [("out_file", "in_files")]), + +]) + + +# ------------------------------------------------------------------------------------------------ +# FMRI Warp, Align, Filtering, Smoothing +# ------------------------------------------------------------------------------------------------ +from nipype.interfaces.nipy import SpaceTimeRealigner +from nipype.algorithms.confounds import TSNR +from nipype.algorithms.rapidart import ArtifactDetect + +from neuro_pypes.fmri.nuisance import rest_noise_filter_wf +from neuro_pypes.interfaces.nilearn import mean_img, smooth_img + + +realign = pe.Node(SpaceTimeRealigner(), name='realign') + +# average +average = pe.Node( + Function( + function=mean_img, + input_names=["in_file"], + output_names=["out_file"], + imports=['from neuro_pypes.interfaces.nilearn import ni2file'] + ), + name='average_epi' +) + +mean_gunzip = pe.Node(Gunzip(), name="mean_gunzip") + +# co-registration nodes +coreg = spm.Coregister() +coreg.inputs.cost_function = "mi" +coreg.inputs.jobtype = 'estwrite' + +coregister = pe.Node(coreg, name="coregister_fmri") +brain_sel = pe.Node(Select(index=[0, 1, 2]), name="brain_sel") + +# brain mask made with EPI +epi_mask = pe.Node(ComputeMask(), name='epi_mask') + +# brain mask made with the merge of the tissue segmentations +tissue_mask = pe.Node(fsl.MultiImageMaths(), name='tissue_mask') +tissue_mask.inputs.op_string = "-add %s -add %s -abs -kernel gauss 4 -dilM -ero -kernel gauss 1 -dilM -bin" +tissue_mask.inputs.out_file = "tissue_brain_mask.nii.gz" + +# select tissues +gm_select = pe.Node(Select(index=[0]), name="gm_sel") +wmcsf_select = pe.Node(Select(index=[1, 2]), name="wmcsf_sel") + +# noise filter +wm_select = pe.Node(Select(index=[1]), name="wm_sel") +csf_select = pe.Node(Select(index=[2]), name="csf_sel") + + +# anat to fMRI registration inputs +wf.connect([ +# (biascorr, coregister), [("output_image", "source")], + (datasource, coregister, [("anat", "source")]), + (segment, brain_sel, 
[("native_class_images", "inlist")]), +]) + + +wf.connect([ + # motion correction + (slice_timing, realign, [("timecorrected_files", "in_file")]), + + # coregistration target + (realign, average, [("out_file", "in_file")]), + (average, mean_gunzip, [("out_file", "in_file")]), + (mean_gunzip, coregister, [("out_file", "target")]), + + # epi brain mask + (average, epi_mask, [("out_file", "mean_volume")]), + + # coregistration + (brain_sel, coregister, [(("out", flatten_list), "apply_to_files")]), + + # tissue brain mask + (coregister, gm_select, [("coregistered_files", "inlist")]), + (coregister, wmcsf_select, [("coregistered_files", "inlist")]), + (gm_select, tissue_mask, [(("out", flatten_list), "in_file")]), + (wmcsf_select, tissue_mask, [(("out", flatten_list), "operand_files")]), + + # nuisance correction + (coregister, wm_select, [("coregistered_files", "inlist",)]), + (coregister, csf_select, [("coregistered_files", "inlist",)]), +]) + + +# ------------------------------------------------------------------------------------------------ +# FMRI Noise removal +# ------------------------------------------------------------------------------------------------ +from neuro_pypes.preproc import motion_regressors, extract_noise_components, create_regressors +from neuro_pypes.utils import selectindex, rename + +# CompCor rsfMRI filters (at least compcor_csf should be True). +filters = { + 'compcor_csf': True, + 'compcor_wm': False, + 'gsr': False +} + +# Compute TSNR on realigned data regressing polynomial up to order 2 +tsnr = pe.Node(TSNR(regress_poly=2), name='tsnr') + +# Use :class:`nipype.algorithms.rapidart` to determine which of the +# images in the functional series are outliers based on deviations in +# intensity or movement. +art = pe.Node(ArtifactDetect(), name="rapidart_artifacts") +# # Threshold to use to detect motion-related outliers when composite motion is being used +art.inputs.use_differences = [True, False] +art.inputs.use_norm = True +art.inputs.zintensity_threshold = 2 +art.inputs.use_norm = True +art.inputs.norm_threshold = 1 +art.inputs.mask_type = 'file' +art.inputs.parameter_source = 'NiPy' + +# Compute motion regressors +motion_regs = pe.Node( + Function( + input_names=['motion_params', 'order', 'derivatives'], + output_names=['out_files'], + function=motion_regressors + ), + name='motion_regressors' +) +# motion regressors upto given order and derivative +# motion + d(motion)/dt + d2(motion)/dt2 (linear + quadratic) +motion_regs.inputs.order = 0 +motion_regs.inputs.derivatives = 1 + +# Create a filter to remove motion and art confounds +motart_pars = pe.Node( + Function( + input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'], + output_names=['out_files'], + function=create_regressors + ), + name='motart_parameters' +) +# # number of polynomials to add to detrend +motart_pars.inputs.detrend_poly = 2 + +motion_filter = pe.Node( + fsl.GLM( + out_f_name='F_mcart.nii.gz', + out_pf_name='pF_mcart.nii.gz', + demean=True + ), + name='motion_filter' +) + +# Noise confound regressors +compcor_pars = pe.Node( + Function( + input_names=['realigned_file', 'mask_file', 'num_components', 'extra_regressors'], + output_names=['components_file'], + function=extract_noise_components + ), + name='compcor_pars' +) +# Number of principal components to calculate when running CompCor. 5 or 6 is recommended. 
+compcor_pars.inputs.num_components = 6
+
+compcor_filter = pe.Node(
+    fsl.GLM(out_f_name='F.nii.gz', out_pf_name='pF.nii.gz', demean=True),
+    name='compcor_filter'
+)
+
+# Global signal regression
+gsr_pars = pe.Node(
+    Function(
+        input_names=['realigned_file', 'mask_file', 'num_components', 'extra_regressors'],
+        output_names=['components_file'],
+        function=extract_noise_components
+    ),
+    name='gsr_pars'
+)
+# Number of principal components to calculate when running Global Signal Regression. 1 is recommended.
+gsr_pars.inputs.num_components = 1
+
+gsr_filter = pe.Node(
+    fsl.GLM(out_f_name='F_gsr.nii.gz', out_pf_name='pF_gsr.nii.gz', demean=True),
+    name='gsr_filter'
+)
+
+wf.connect([
+    # tsnr
+    (realign, tsnr, [
+        ("out_file", "in_file"),
+    ]),
+
+    # artifact detection
+    (tissue_mask, art, [("out_file", "mask_file")]),
+    (realign, art, [
+        ("out_file", "realigned_files"),
+        ("par_file", "realignment_parameters")
+    ]),
+
+    # calculate motion regressors
+    (realign, motion_regs, [
+        ("par_file", "motion_params")
+    ]),
+
+    # create motion and confound regressors parameters file
+    (art, motart_pars, [
+        ("norm_files", "comp_norm"),
+        ("outlier_files", "outliers"),
+    ]),
+    (motion_regs, motart_pars, [
+        ("out_files", "motion_params")
+    ]),
+
+    # motion filtering
+    (realign, motion_filter, [
+        ("out_file", "in_file"),
+        (("out_file", rename, "_filtermotart"), "out_res_name"),
+    ]),
+    (motart_pars, motion_filter, [
+        (("out_files", selectindex, 0), "design")
+    ]),
+])
+
+wf.connect([
+    # output
+    (tsnr, datasink, [("tsnr_file", "rest.@tsnr")]),
+
+    (motart_pars, datasink, [("out_files", "rest.@motion_regressors")]),
+    (motion_filter, datasink, [("out_res", "rest.@motion_corrected")]),
+    (art, datasink, [
+        ("displacement_files", "rest.artifact_stats.@displacement"),
+        ("intensity_files", "rest.artifact_stats.@intensity"),
+        ("norm_files", "rest.artifact_stats.@norm"),
+        ("outlier_files", "rest.artifact_stats.@outliers"),
+        ("plot_files", "rest.artifact_stats.@plots"),
+        ("statistic_files", "rest.artifact_stats.@stats"),
+    ]),
+])
+
+
+last_filter = motion_filter
+
+# compcor filter
+if filters['compcor_csf'] or filters['compcor_wm']:
+    wf.connect([
+        # calculate compcor regressor and parameters file
+        (motart_pars, compcor_pars, [(("out_files", selectindex, 0), "extra_regressors"), ]),
+        (motion_filter, compcor_pars, [("out_res", "realigned_file"), ]),
+
+        # the compcor filter
+        (motion_filter, compcor_filter, [
+            ("out_res", "in_file"),
+            (("out_res", rename, "_cleaned"), "out_res_name"),
+        ]),
+        (compcor_pars, compcor_filter, [("components_file", "design")]),
+        (tissue_mask, compcor_filter, [("out_file", "mask")]),
+
+        # output
+        (compcor_pars, datasink, [("components_file", "rest.@compcor_regressors")]),
+    ])
+    last_filter = compcor_filter
+
+# global signal regression
+if filters['gsr']:
+    wf.connect([
+        # calculate gsr regressors parameters file
+        (last_filter, gsr_pars, [("out_res", "realigned_file")]),
+        (tissue_mask, gsr_pars, [("out_file", "mask_file")]),
+
+        # the output file name
+        (tissue_mask, gsr_filter, [("out_file", "mask")]),
+        (last_filter, gsr_filter, [
+            ("out_res", "in_file"),
+            (("out_res", rename, "_gsr"), "out_res_name"),
+        ]),
+        (gsr_pars, gsr_filter, [("components_file", "design")]),
+
+        # output
+        (gsr_pars, datasink, [("components_file", "rest.@gsr_regressors")]),
+    ])
+    last_filter = gsr_filter
+
+# connect the final nuisance correction output node
+wf.connect([(last_filter, datasink, [("out_res", "rest.@nuis_corrected")]), ])
+
+if filters['compcor_csf'] and filters['compcor_wm']:
+    from nipype.interfaces.utility import Merge
+
+    mask_merge = pe.Node(Merge(2), name="mask_merge")
+    wf.connect([
+        # the mask for the compcor filter
+        (wm_select, mask_merge, [(("out", flatten_list), "in1")]),
+        (csf_select, mask_merge, [(("out", flatten_list), "in2")]),
+        (mask_merge, compcor_pars, [("out", "mask_file")]),
+    ])
+
+elif filters['compcor_csf']:
+    wf.connect([
+        # the mask for the compcor filter
+        (csf_select, compcor_pars, [(("out", flatten_list), "mask_file")]),
+    ])
+
+elif filters['compcor_wm']:
+    wf.connect([
+        # the mask for the compcor filter
+        (wm_select, compcor_pars, [(("out", flatten_list), "mask_file")]),
+    ])
+
+
+# In[ ]:
+
+
+from neuro_pypes.fmri.filter import bandpass_filter
+from neuro_pypes.interfaces.nilearn import smooth_img
+
+# bandpass filtering
+bandpass = pe.Node(
+    Function(
+        input_names=['files', 'lowpass_freq', 'highpass_freq', 'tr'],
+        output_names=['out_files'],
+        function=bandpass_filter
+    ),
+    name='bandpass'
+)
+bandpass.inputs.lowpass_freq = 0.1
+bandpass.inputs.highpass_freq = 0.01
+
+# smooth
+smooth = pe.Node(
+    Function(
+        function=smooth_img,
+        input_names=["in_file", "fwhm"],
+        output_names=["out_file"],
+        imports=['from neuro_pypes.interfaces.nilearn import ni2file']
+    ),
+    name="smooth"
+)
+smooth.inputs.fwhm = fmri_smoothing_kernel_fwhm
+smooth.inputs.out_file = "smooth_std_{}.nii.gz".format(wf_name)
+
+
+wf.connect([
+    # temporal filtering
+    (last_filter, bandpass, [("out_res", "files")]),
+
+    # (realign, bandpass, [("out_file", "files")]),
+    (params, bandpass, [("time_repetition", "tr")]),
+    (bandpass, smooth, [("out_files", "in_file")]),
+
+    # output
+    (epi_mask, datasink, [("brain_mask", "rest.@epi_brain_mask")]),
+    (tissue_mask, datasink, [("out_file", "rest.@tissues_brain_mask")]),
+    (realign, datasink, [
+        ("out_file", "rest.@realigned"),
+        ("par_file", "rest.@motion_params"),
+    ]),
+    (coregister, datasink, [
+        ("coregistered_files", "rest.@tissues"),
+        ("coregistered_source", "rest.@anat"),
+    ]),
+    (average, datasink, [("out_file", "rest.@avg_epi")]),
+    (bandpass, datasink, [("out_files", "rest.@time_filtered")]),
+    (smooth, datasink, [("out_file", "rest.@smooth")]),
+])
+
+
+if __name__ == '__main__':
+    n_cpus = 1
+
+    if n_cpus > 1:
+        # 'MultiProc' is nipype's multiprocessing execution plugin
+        wf.run(plugin='MultiProc', plugin_args={"n_procs": n_cpus})
+    else:
+        wf.run(plugin=None)
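
Before launching the workflow built in process.py, it can help to confirm that the crumb pattern actually matches the subjects and sessions on disk. The snippet below is only an illustrative sketch and is not part of this patch; it reuses the same hansel helpers that process.py already imports and assumes the `/data/raw/{subject_id}/{session}/{image}` layout used there.

```
# Illustrative sketch, not part of the committed scripts.
# Prints the subject_id/session values matched by the crumb pattern,
# so missing inputs are caught before wf.run() is called.
import os

from hansel import Crumb
from hansel.operations import joint_value_map, valuesmap_to_dict

data_path = os.path.join('/data/raw', '{subject_id}', '{session}', '{image}')
data_crumb = Crumb(data_path, ignore_list=['.*'])

# e.g. {'subject_id': ['subject_3', 'subject_4'], 'session': ['session_0']}
print(valuesmap_to_dict(joint_value_map(data_crumb, ['subject_id', 'session'])))
```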