Skip to content

Commit 4659539

Browse files
committed
Merge pull request #17 from satra/enh/package
WIP - RF: turning bips into a package with a single command line executable
2 parents 4087d23 + 1835e40 commit 4659539

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

48 files changed

+707
-95
lines changed

LICENSE

+13
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
Copyright [2012] [INCF, MIT]
2+
3+
Licensed under the Apache License, Version 2.0 (the "License");
4+
you may not use this file except in compliance with the License.
5+
You may obtain a copy of the License at
6+
7+
http://www.apache.org/licenses/LICENSE-2.0
8+
9+
Unless required by applicable law or agreed to in writing, software
10+
distributed under the License is distributed on an "AS IS" BASIS,
11+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
See the License for the specific language governing permissions and
13+
limitations under the License.

README

-6
This file was deleted.

README.rst

+6
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
Optimized Nipype_ pipelines for brain imaging
2+
=============================================
3+
4+
`Detailed description of the pipelines <https://docs.google.com/document/d/1du0c1ltvNUvH5F3paIi1V3TUWV_-SarHsrId_eEII6I/edit>`_
5+
6+
.. _Nipype: http://nipy.org/nipype

bin/bips

+32
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
#!/usr/bin/env python
2+
"""Meta routine to configure and run various brain imaging workflows
3+
"""
4+
5+
import argparse
6+
7+
8+
# modality
9+
# subtype
10+
# workflow
11+
# config
12+
13+
"""
14+
Examples
15+
bips -m fmri -t task -w preproc -c config.py
16+
bips -m fmri -t task -w qa -c config.py
17+
bips -m fmri -t resting -w qa -c config.py
18+
19+
bips -m dmri -w preproc -c config.py
20+
bips -m dmri -w track -c config.py
21+
22+
bips -m smri -w recon -c config.py
23+
bips -m smri -t volume -w normalize -c config.py
24+
bips -m smri -t volume -w template -c config.py
25+
bips -m smri -t surface -w template -c config.py
26+
27+
alternatively
28+
bips -i -u uuid # display info about workflow
29+
bips -u uuid -c config.json # create config for running workflow
30+
bips -r config.json # run workflow
31+
32+
"""

bips/COMMIT_INFO.txt

+6
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
# This is an ini file that may contain information about the code state
2+
[commit hash]
3+
# The line below may contain a valid hash if it has been substituted during 'git archive'
4+
archive_subst_hash=$Format:%h$
5+
# This line may be modified by the install process
6+
install_hash=

bips/__init__.py

+58
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
2+
# vi: set ft=python sts=4 ts=4 sw=4 et:
3+
import os
4+
5+
from info import (LONG_DESCRIPTION as __doc__,
6+
URL as __url__,
7+
STATUS as __status__,
8+
__version__)
9+
10+
from numpy.testing import Tester
11+
12+
13+
class BipsTester(Tester):
14+
def test(self, label='fast', verbose=1, extra_argv=None,
15+
doctests=False, coverage=False):
16+
# setuptools does a chmod +x on ALL python modules when it
17+
# installs. By default, as a security measure, nose refuses to
18+
# import executable files. To force nose to execute our tests, we
19+
# must supply the '--exe' flag. List thread on this:
20+
# http://www.mail-archive.com/[email protected]/msg05009.html
21+
if not extra_argv:
22+
extra_argv = ['--exe']
23+
else:
24+
extra_argv.append('--exe')
25+
super(BipsTester, self).test(label, verbose, extra_argv,
26+
doctests, coverage)
27+
# Grab the docstring from numpy
28+
#test.__doc__ = Tester.test.__doc__
29+
30+
test = BipsTester().test
31+
bench = BipsTester().bench
32+
33+
34+
def _test_local_install():
35+
""" Warn the user that running with bips being
36+
imported locally is a bad idea.
37+
"""
38+
if os.getcwd() == os.sep.join(
39+
os.path.abspath(__file__).split(os.sep)[:-2]):
40+
import warnings
41+
warnings.warn('Running the tests from the install directory may '
42+
'trigger some failures')
43+
44+
_test_local_install()
45+
46+
# Set up package information function
47+
from pkg_info import get_pkg_info as _get_pkg_info
48+
get_info = lambda: _get_pkg_info(os.path.dirname(__file__))
49+
50+
# Cleanup namespace
51+
del _test_local_install
52+
53+
# If this file is exec'd again after being imported, the following lines will
54+
# fail
55+
try:
56+
del Tester
57+
except:
58+
pass
File renamed without changes.

dti/config.py bips/dmri/config.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88

99
dataDir = '/mindhive/gablab/sad/PY_STUDY_DIR/Block/data'
10-
workingdir = '/mindhive/gablab/sad/PY_STUDY_DIR/Block/scripts/l1output/workflows/dti'
10+
workingdir = '/mindhive/gablab/sad/PY_STUDY_DIR/Block/scripts/l1output/workflows/dmri'
1111
subjects = ['SAD_018']
1212

1313
skeleton_thresh = 0.2
@@ -22,7 +22,7 @@ def get_datasource():
2222
'bval']),
2323
name='datasource')
2424
datasource.inputs.base_directory = os.path.abspath(dataDir)
25-
datasource.inputs.template = os.path.join(dataDir,'%s','dti','%s')
25+
datasource.inputs.template = os.path.join(dataDir,'%s','dmri','%s')
2626
datasource.inputs.template_args = dict(dwi=[['subject_id', 'diffusionseries.nii.gz']],
2727
bvec=[['subject_id', 'bvecs']],
2828
bval=[['subject_id', 'bvals']])
File renamed without changes.

bips/fmri/__init__.py

Whitespace-only changes.

fmri/base.py bips/fmri/base.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -764,6 +764,6 @@ def create_first(name='modelfit'):
764764
outputspec, 'design_cov')
765765
return modelfit
766766

767-
#def normalize(name = "normalize"):
767+
#def smri(name = "smri"):
768768

769769

bips/fmri/misc/__init__.py

Whitespace-only changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.

bips/fmri/qa/__init__.py

Whitespace-only changes.
File renamed without changes.
File renamed without changes.

bips/fmri/resting/__init__.py

Whitespace-only changes.

fmri/resting/normalize_full_resting.py bips/fmri/resting/normalize_full_resting.py

+9-11
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,18 @@
11
import argparse
22
import os
33
import sys
4-
sys.path.insert(0, '../../normalize')
5-
from base import get_full_norm_workflow
4+
65
import nipype.pipeline.engine as pe
76
import nipype.interfaces.utility as util
87
from nipype.interfaces.io import FreeSurferSource
98
import nipype.interfaces.io as nio
109

10+
from ...smri.base import get_full_norm_workflow
1111

1212
pickfirst = lambda x: x[0]
1313

1414

1515
def func_datagrabber(name="resting_output_datagrabber"):
16-
import nipype.pipeline.engine as pe
17-
import nipype.interfaces.io as nio
1816
# create a node to obtain the functional images
1917
datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id',
2018
'fwhm'],
@@ -35,7 +33,7 @@ def func_datagrabber(name="resting_output_datagrabber"):
3533
return datasource
3634

3735

38-
def normalize_workflow(name="normalize"):
36+
def normalize_workflow(name="smri"):
3937
norm = get_full_norm_workflow()
4038
datagrab = func_datagrabber()
4139

@@ -69,14 +67,14 @@ def normalize_workflow(name="normalize"):
6967

7068
outputspec = norm.get_node('outputspec')
7169
norm.connect(infosource, 'subject_id', sinkd, 'container')
72-
norm.connect(outputspec, 'warped_image', sinkd, 'normalize.warped_image')
73-
norm.connect(outputspec, 'warp_field', sinkd, 'normalize.warped_field')
70+
norm.connect(outputspec, 'warped_image', sinkd, 'smri.warped_image')
71+
norm.connect(outputspec, 'warp_field', sinkd, 'smri.warped_field')
7472
norm.connect(outputspec, 'affine_transformation',
75-
sinkd, 'normalize.affine_transformation')
76-
norm.connect(outputspec, 'inverse_warp', sinkd, 'normalize.inverse_warp')
73+
sinkd, 'smri.affine_transformation')
74+
norm.connect(outputspec, 'inverse_warp', sinkd, 'smri.inverse_warp')
7775
norm.connect(outputspec, 'unwarped_brain',
78-
sinkd, 'normalize.unwarped_brain')
79-
norm.connect(outputspec, 'warped_brain', sinkd, 'normalize.warped_brain')
76+
sinkd, 'smri.unwarped_brain')
77+
norm.connect(outputspec, 'warped_brain', sinkd, 'smri.warped_brain')
8078

8179
return norm
8280

File renamed without changes.

fmri/resting/resting_preproc.py bips/fmri/resting/resting_preproc.py

+3-4
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,13 @@
11
#Imports ---------------------------------------------------------------------
2+
import argparse
23
import sys
3-
sys.path.append('..')
44

55
import nipype.interfaces.utility as util # utility
66
import nipype.pipeline.engine as pe # pypeline engine
77
import os
88

9-
from base import create_rest_prep
10-
from utils import get_datasink, get_substitutions, get_regexp_substitutions
11-
import argparse
9+
from ..base import create_rest_prep
10+
from ..utils import get_datasink, get_substitutions, get_regexp_substitutions
1211

1312
# Preprocessing
1413
# -------------------------------------------------------------

bips/fmri/setup.py

+18
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
2+
# vi: set ft=python sts=4 ts=4 sw=4 et:
3+
def configuration(parent_package='',top_path=None):
4+
from numpy.distutils.misc_util import Configuration
5+
config = Configuration('fmri', parent_package, top_path)
6+
7+
# List all packages to be loaded here
8+
config.add_subpackage('misc')
9+
config.add_subpackage('qa')
10+
config.add_subpackage('resting')
11+
config.add_subpackage('task')
12+
13+
# List all data directories to be loaded here
14+
return config
15+
16+
if __name__ == '__main__':
17+
from numpy.distutils.core import setup
18+
setup(**configuration(top_path='').todict())

bips/fmri/task/__init__.py

Whitespace-only changes.
File renamed without changes.

fmri/task/fixedfx.py bips/fmri/task/fixedfx.py

+35-46
Original file line numberDiff line numberDiff line change
@@ -1,100 +1,89 @@
11
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
22
# vi: set ft=python sts=4 ts=4 sw=4 et:
3-
"""
4-
A pipeline example that intergrates spm, fsl freesurfer modules to perform a
5-
comparative volume and surface based first level analysis.
6-
7-
This tutorial uses the nipype-tutorial data and hence should be run from the
8-
directory containing tutorial data
9-
10-
python freesurfer_tutorial.py
11-
3+
"""Perform fixed effects analysis on runs processed by preproc.py
124
"""
135

146
import argparse
157
import os # system functions
168
import sys
17-
#from nipype.utils.config import config
18-
#config.enable_debug_mode()
199

20-
#from config import (subjects, root_dir, getcontrasts, auto_fixedfx, fwhm, run_on_grid, overlaythresh, subjectinfo, test_mode)
21-
from copy import deepcopy
22-
from glob import glob
2310
from nipype.workflows.fmri.fsl.estimate import create_fixed_effects_flow
24-
25-
import numpy as np
2611
import nipype.interfaces.io as nio # i/o routines
2712
import nipype.interfaces.fsl as fsl # fsl
2813
import nipype.interfaces.utility as util # utility
2914
import nipype.pipeline.engine as pe # pypeline engine
30-
import argparse
15+
3116
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
3217

3318

34-
def getinfo(subject_id,getcontrasts,subjectinfo):
19+
def getinfo(subject_id, getcontrasts, subjectinfo):
3520
numruns = len(subjectinfo(subject_id))
36-
print numruns
21+
print numruns # dbg
3722
numcon = len(getcontrasts(subject_id))
38-
info = dict(copes=[['subject_id', 'fwhm',range(1,numcon+1)]],
39-
varcopes=[['subject_id', 'fwhm', range(1,numcon+1)]],
23+
info = dict(copes=[['subject_id', 'fwhm', range(1, numcon + 1)]],
24+
varcopes=[['subject_id', 'fwhm', range(1, numcon + 1)]],
4025
dof_files=[['subject_id', 'fwhm']],
41-
mask_file=[['subject_id']])
26+
mask_file=[['subject_id']])
4227
return info
4328

29+
4430
def num_copes(files):
4531
if type(files[0]) is list:
4632
return len(files[0])
4733
else:
4834
return len(files)
4935

50-
def getsubs(subject_id,getcontrasts):
51-
52-
subs = [('_subject_id_%s/'%subject_id,''),
36+
37+
def getsubs(subject_id, getcontrasts):
38+
subs = [('_subject_id_%s/' % subject_id, ''),
5339
('_runs', '/runs'),
5440
('_fwhm', 'fwhm')]
5541
cons = getcontrasts(subject_id)
5642
for i, con in enumerate(cons):
57-
subs.append(('_flameo%d/cope1'%i, 'cope_%s'%(con[0])))
58-
subs.append(('_flameo%d/varcope1'%(i), 'varcope_%s'%(con[0])))
59-
subs.append(('_flameo%d/tstat1'%(i), 'tstat_%s'%(con[0])))
60-
subs.append(('_flameo%d/zstat1'%(i), 'zstat_%s'%(con[0])))
61-
subs.append(('_flameo%d/res4d'%(i), 'res4d_%s'%(con[0])))
62-
subs.append(('_ztop%d/zstat1_pval'%(i), 'pval_%s'%(con[0])))
63-
subs.append(('_slicestats%d/zstat1_overlay.png'%(i),'zstat_overlay%d_%s.png'%(i,con[0])))
43+
subs.append(('_flameo%d/cope1' % i, 'cope_%s' % con[0]))
44+
subs.append(('_flameo%d/varcope1' % i, 'varcope_%s' % con[0]))
45+
subs.append(('_flameo%d/tstat1' % i, 'tstat_%s' % con[0]))
46+
subs.append(('_flameo%d/zstat1' % i, 'zstat_%s' % con[0]))
47+
subs.append(('_flameo%d/res4d' % i, 'res4d_%s' % con[0]))
48+
subs.append(('_ztop%d/zstat1_pval' % i, 'pval_%s' % con[0]))
49+
subs.append(('_slicestats%d/zstat1_overlay.png' % i,
50+
'zstat_overlay%d_%s.png' % (i, con[0])))
6451
return subs
6552

53+
6654
def create_overlay_workflow(name='overlay'):
6755
# Setup overlay workflow
6856

6957
overlay = pe.Workflow(name='overlay')
70-
71-
inputspec = pe.Node(interface=util.IdentityInterface(fields=['subject_id',
72-
'fwhm',
73-
'stat_image']),
58+
59+
inputspec = pe.Node(util.IdentityInterface(fields=['subject_id',
60+
'fwhm',
61+
'stat_image']),
7462
name='inputspec')
75-
63+
7664
datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
7765
outfields=['meanfunc']),
7866
name='datasource')
79-
80-
datasource.inputs.base_directory = os.path.join(c.sink_dir,'analyses','func')
67+
68+
datasource.inputs.base_directory = os.path.join(c.sink_dir, 'analyses',
69+
'func')
8170
datasource.inputs.template = '*'
8271
datasource.inputs.sort_filelist = True
8372
datasource.inputs.field_template = dict(meanfunc='%s/preproc/meanfunc/*.nii.gz')
84-
datasource.inputs.template_args = dict(meanfunc = [['subject_id']])
73+
datasource.inputs.template_args = dict(meanfunc=[['subject_id']])
8574

86-
overlaystats = pe.MapNode(interface=fsl.Overlay(),
75+
overlaystats = pe.MapNode(interface=fsl.Overlay(),
8776
name="overlaystats",
8877
iterfield=['stat_image'])
89-
90-
slicestats = pe.MapNode(interface=fsl.Slicer(),
78+
79+
slicestats = pe.MapNode(interface=fsl.Slicer(),
9180
name="slicestats",
9281
iterfield=['in_file'])
93-
82+
9483
slicestats.inputs.all_axial = True
9584
slicestats.inputs.image_width = 512
96-
overlaystats.inputs.show_negative_stats=True
97-
overlaystats.inputs.auto_thresh_bg=True
85+
overlaystats.inputs.show_negative_stats = True
86+
overlaystats.inputs.auto_thresh_bg = True
9887
overlaystats.inputs.stat_thresh = c.overlaythresh
9988

10089
overlay.connect(inputspec, 'subject_id', datasource, 'subject_id')

0 commit comments

Comments
 (0)