Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ENH] update ICA to sklearn from mdp #44

Merged
merged 15 commits into from
Nov 13, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@ More information and documentation can be found at https://tedana.readthedocs.io
You'll need to set up a working development environment to use `tedana`.
To set up a local environment, you will need Python >=3.6 and the following packages will need to be installed:

[mdp](https://pypi.org/project/MDP/)
[numpy](http://www.numpy.org/)
[scikit-learn](http://scikit-learn.org/stable/)
[scipy](https://www.scipy.org/)
Expand Down
3 changes: 1 addition & 2 deletions docs/installation.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,9 @@ Installation
------------

You'll need to set up a working development environment to use ``tedana``.
To set up a local environment, you will need Python >=3.6 and the following
To set up a local environment, you will need Python >=3.5 and the following
packages will need to be installed:

- mdp
- nilearn
- nibabel>=2.1.0
- numpy
Expand Down
1 change: 0 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
numpy>=1.14
scikit-learn
scipy
mdp
nilearn
nibabel>=2.1.0
pywavelets
Expand Down
39 changes: 17 additions & 22 deletions tedana/decomposition/eigendecomp.py
Original file line number Diff line number Diff line change
Expand Up @@ -333,8 +333,7 @@ def tedpca(catd, OCcatd, combmode, mask, t2s, t2sG, stabilize,
return n_components, kept_data


def tedica(n_components, dd, conv, fixed_seed, cost, final_cost,
verbose=False):
def tedica(n_components, dd, conv, fixed_seed, cost='logcosh'):
"""
Performs ICA on `dd` and returns mixing matrix

Expand All @@ -347,16 +346,10 @@ def tedica(n_components, dd, conv, fixed_seed, cost, final_cost,
samples and `T` is time
conv : :obj:`float`
Convergence limit for ICA
fixed_seed : :obj:`int`
Value passed to ``mdp.numx_rand.seed()``.
Set to an integer value for reproducible ICA results;
otherwise, set to -1 for varying results across calls.
cost : {'tanh', 'pow3', 'gaus', 'skew'}
Initial cost function for ICA.
final_cost : {'tanh', 'pow3', 'gaus', 'skew'}
Final cost function for ICA.
verbose : :obj:`bool`, optional
Whether to print messages regarding convergence process. Default: False
cost : {'logcosh', 'exp', 'cube'}, optional
Cost function for ICA. Default is 'logcosh'.
fixed_seed : :obj:`int`
Seed for ensuring reproducibility of ICA results; set to -1 to draw a new random seed on each call

Returns
-------
Expand All @@ -366,20 +359,22 @@ def tedica(n_components, dd, conv, fixed_seed, cost, final_cost,

Notes
-----
Uses `mdp` implementation of FastICA for decomposition
Uses `sklearn` implementation of FastICA for decomposition
"""

import mdp
from sklearn.decomposition import FastICA

if cost not in ('logcosh', 'cube', 'exp'):
LGR.error('ICA cost function not understood')
raise

climit = float(conv)
if fixed_seed == -1:
fixed_seed = np.random.randint(low=1, high=1000)
mdp.numx_rand.seed(fixed_seed)
icanode = mdp.nodes.FastICANode(white_comp=n_components, approach='symm',
g=cost, fine_g=final_cost,
coarse_limit=climit*100, limit=climit,
verbose=verbose)
icanode.train(dd)
smaps = icanode.execute(dd) # noqa
mmix = icanode.get_recmatrix().T
rand_state = np.random.RandomState(seed=fixed_seed)
ica = FastICA(n_components=n_components, algorithm='parallel',
fun=cost, tol=climit, random_state=rand_state)
ica.fit(dd)
mmix = ica.mixing_
mmix = stats.zscore(mmix, axis=0)
return mmix, fixed_seed
1 change: 0 additions & 1 deletion tedana/info.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
REQUIRES = [
'numpy',
'scikit-learn',
'mdp',
'pywavelets',
'nilearn',
'nibabel>=2.1.0',
Expand Down
35 changes: 12 additions & 23 deletions tedana/workflows/tedana.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,19 +103,12 @@ def _get_parser():
help=('Combination scheme for TEs: '
't2s (Posse 1999, default), ste (Poser)'),
default='t2s')
parser.add_argument('--initcost',
dest='initcost',
action='store',
choices=['tanh', 'pow3', 'gaus', 'skew'],
help=('Initial cost function for ICA.'),
default='tanh')
parser.add_argument('--finalcost',
dest='finalcost',
action='store',
choices=['tanh', 'pow3', 'gaus', 'skew'],
help=('Final cost function for ICA. Same options as '
'initcost.'),
default='tanh')
parser.add_argument('--cost',
dest='cost',
help=('Cost func. for ICA: '
'logcosh (default), cube, exp'),
choices=['logcosh', 'cube', 'exp'],
default='logcosh')
parser.add_argument('--denoiseTEs',
dest='dne',
action='store_true',
Expand Down Expand Up @@ -174,8 +167,7 @@ def _get_parser():

def tedana_workflow(data, tes, mask=None, mixm=None, ctab=None, manacc=None,
strict=False, gscontrol=True, kdaw=10., rdaw=1., conv=2.5e-5,
ste=-1, combmode='t2s', dne=False,
initcost='tanh', finalcost='tanh',
ste=-1, combmode='t2s', dne=False, cost='logcosh',
stabilize=False, filecsdata=False, wvpca=False,
label=None, fixed_seed=42, debug=False, quiet=False):
"""
Expand Down Expand Up @@ -219,10 +211,8 @@ def tedana_workflow(data, tes, mask=None, mixm=None, ctab=None, manacc=None,
Combination scheme for TEs: 't2s' (Posse 1999, default), 'ste' (Poser).
dne : :obj:`bool`, optional
Denoise each TE dataset separately. Default is False.
initcost : {'tanh', 'pow3', 'gaus', 'skew'}, optional
Initial cost function for ICA. Default is 'tanh'.
finalcost : {'tanh', 'pow3', 'gaus', 'skew'}, optional
Final cost function. Default is 'tanh'.
cost : {'logcosh', 'exp', 'cube'}, optional
Cost function for ICA. Default is 'logcosh'.
stabilize : :obj:`bool`, optional
Stabilize convergence by reducing dimensionality, for low quality data.
Default is False.
Expand Down Expand Up @@ -337,11 +327,10 @@ def tedana_workflow(data, tes, mask=None, mixm=None, ctab=None, manacc=None,
t2s, t2sG, stabilize, ref_img,
tes=tes, kdaw=kdaw, rdaw=rdaw,
ste=ste, wvpca=wvpca)
# Perform ICA on dimensionally reduced data (*without* thermal noise)
mmix_orig, fixed_seed = decomposition.tedica(n_components, dd, conv, fixed_seed,
cost=initcost, final_cost=finalcost,
verbose=debug)
mmix_orig, fixed_seed = decomposition.tedica(n_components, dd, conv,
fixed_seed, cost=cost)
np.savetxt(op.join(out_dir, '__meica_mix.1D'), mmix_orig)

LGR.info('Making second component selection guess from ICA results')
# Estimate betas and compute selection metrics for mixing matrix
# generated from dimensionally reduced data using full data (i.e., data
Expand Down