Various review comments (#11)
* Update README

* Various minor code cleanups

* Further minor code cleanups

* Lock python version to allow scipy update

* Various review comments

* Fix faulty merge

---------

Co-authored-by: Reinder Vos de Wael <[email protected]>
ReinderVosDeWael authored Aug 3, 2023
1 parent 6aba59f commit f322cff
Showing 8 changed files with 304 additions and 198 deletions.
README.md: 2 changes (1 addition, 1 deletion)
@@ -5,7 +5,7 @@
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![L-GPL License](https://img.shields.io/badge/license-L--GPL-blue.svg)](LICENSE)

This is a command line interface (CLI) for running BrainSpace on BIDS-compliant datasets. Gradients are computed for volumetric files in NIFTI format, or surface files in GIFTI format. For more details on BrainSpace, see the [BrainSpace documentation](https://brainspace.readthedocs.io/en/latest/).
This is a command line interface (CLI) for running BrainSpace on BIDS-compliant datasets. BrainSpace is a toolbox for macroscale gradient mapping, a common method to reduce the dimensionality of neuroimaging data. Gradients are computed for volumetric files in NIFTI format, or surface files in GIFTI format. For more details on BrainSpace, see the [BrainSpace documentation](https://brainspace.readthedocs.io/en/latest/).
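As a rough standalone sketch of what "gradient mapping" means here (illustration only: the real pipeline uses BrainSpace's affinity kernels and embedding methods, and the array sizes below are invented):

```python
import numpy as np

# Toy timeseries: 200 timepoints x 50 regions (random values, for illustration only).
rng = np.random.default_rng(0)
timeseries = rng.standard_normal((200, 50))

# Region-by-region connectivity (Pearson correlation across time).
connectivity = np.corrcoef(timeseries.T)

# Reduce the 50x50 connectivity matrix to a few components ("gradients");
# each eigenvector orders regions along one axis of connectivity variation.
eigenvalues, eigenvectors = np.linalg.eigh(connectivity)
gradients = eigenvectors[:, ::-1][:, :3]  # top 3 components, one column per gradient
print(gradients.shape)  # (50, 3): one value per region per gradient
```

BrainSpace generalizes this idea with configurable similarity kernels and embedding methods (see the `--kernel` and `--dimensionality_reduction` options further down in this diff).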

## Installation

src/ba_timeseries_gradients/cli.py: 186 changes (4 additions, 182 deletions)
@@ -2,11 +2,10 @@
""" Command line interface for ba_timeseries_gradients. """
import argparse
import logging
import pathlib

import bids

from ba_timeseries_gradients import exceptions, gradients, logs, utils
from ba_timeseries_gradients import exceptions, gradients, logs, parser, utils

LOGGER_NAME = logs.LOGGER_NAME

@@ -18,10 +17,9 @@ def main() -> None:
The main function that runs the ba_timeseries_gradients command line interface.
"""
logger.debug("Parsing command line arguments...")
parser = _get_parser()
args = parser.parse_args()
args = parser.get_parser().parse_args()

logger.setLevel(args.verbose)
logger.setLevel(logging.getLevelName(args.verbose.upper()))

logger.debug("Getting input files...")
files = _get_bids_files(args)
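The new verbosity handling above suggests that `--verbose` now accepts a level name rather than an integer (an inference from this hunk; the new `parser` module is not shown in this diff). A quick sketch of why the `.upper()` call matters with `logging.getLevelName`:

```python
import logging

# logging.getLevelName maps a registered level name to its numeric value,
# but the lookup is case-sensitive, hence the .upper() above.
logging.getLevelName("INFO")   # 20
logging.getLevelName("DEBUG")  # 10
logging.getLevelName("info")   # "Level info" (unknown names are echoed back as strings)
```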
@@ -49,180 +47,6 @@ def main() -> None:
utils.save(output_gradients, lambdas, output_file)


def _get_parser() -> argparse.ArgumentParser:
"""
Returns an ArgumentParser object with the command line arguments for the ba_timeseries_gradients CLI.
Returns:
argparse.ArgumentParser: An ArgumentParser object with the command line arguments.
Notes:
Arguments in the bids_group must have a `dest` value equivalent to `bids_<argument>`, where
<argument> is the name of the argument in the BIDS specification.
"""
parser = argparse.ArgumentParser(
prog="ba_timeseries_gradients",
description="""Computes gradients for a BIDS dataset. If the target
files are volumetric, they must be in NIFTI format, and a parcellation
file must be provided.""",
epilog="""Issues can be reported at: https://github.com/cmi-dair/ba_timeseries_gradients.""",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)

mandatory_group = parser.add_argument_group("Mandatory arguments")
bids_group = parser.add_argument_group("BIDS arguments")
brainspace_group = parser.add_argument_group("BrainSpace arguments")
other_group = parser.add_argument_group("Other arguments")

mandatory_group.add_argument(
"bids_dir",
type=pathlib.Path,
help="BIDS directory containing the input files.",
)
mandatory_group.add_argument(
"output_dir",
type=pathlib.Path,
help="Output directory.",
)
mandatory_group.add_argument(
"analysis_level",
type=str,
help="Level of the analysis that will be performed.",
choices=["group"],
)
bids_group.add_argument(
"--subject",
required=False,
default=None,
type=str,
action="append",
dest="bids_subject",
help="The subject regex to use for searching BIDS files.",
)
bids_group.add_argument(
"--session",
required=False,
default=None,
type=str,
action="append",
dest="bids_session",
help="The session to include for finding BIDS files.",
)
bids_group.add_argument(
"--suffix",
required=False,
default="bold",
type=str,
dest="bids_suffix",
help="Suffix to use for finding BIDS files.",
)
bids_group.add_argument(
"--run",
required=False,
default=None,
type=int,
action="append",
dest="bids_run",
help="The runs to include, may be supplied multiple times.",
)
bids_group.add_argument(
"--task",
required=False,
default=None,
type=str,
action="append",
dest="bids_task",
help="The tasks to include, may be supplied multiple times.",
)
bids_group.add_argument(
"--space",
required=False,
default=None,
type=str,
dest="bids_space",
help="The space to use for finding BIDS files.",
)
bids_group.add_argument(
"--extension",
required=False,
default=".nii.gz",
type=str,
dest="bids_extension",
help="Extension to use for finding BIDS files.",
)
bids_group.add_argument(
"--datatype",
required=False,
default=None,
type=str,
dest="bids_datatype",
)
brainspace_group.add_argument(
"--parcellation",
required=False,
default=None,
type=pathlib.Path,
help="Parcellation to use for similarity calculation. Must be a GIFTI or NIFTI file, obligatory if input files are NIFTI.",
)
brainspace_group.add_argument(
"--dimensionality_reduction",
required=False,
default="dm",
type=str,
help="Dimensionality reduction method to use. Must be one of 'pca', 'le', or 'dm'.",
)
brainspace_group.add_argument(
"--kernel",
required=False,
default="cosine",
type=str,
help="Kernel to use for similarity calculation. Must be one of: 'pearson', 'spearman', 'cosine', 'normalized_angle', 'gaussian'.",
)
brainspace_group.add_argument(
"--sparsity",
required=False,
default=0.9,
type=float,
help="Sparsity to use for similarity calculation. Must be a float between 0 and 1.",
)
brainspace_group.add_argument(
"--n_components",
required=False,
default=10,
type=int,
help="Number of components to use for dimensionality reduction. Must be an integer.",
)
other_group.add_argument(
"--force",
required=False,
action="store_true",
help="Force overwrite of output file if it already exists.",
)
other_group.add_argument(
"--verbose",
required=False,
default=logging.INFO,
type=int,
help="Verbosity level. Must be one of: 10 (DEBUG), 20 (INFO), 30 (WARNING), 40 (ERROR), 50 (CRITICAL).",
)
other_group.add_argument(
"--output_format",
required=False,
default="h5",
type=str,
help="Output file format",
choices=["h5", "json"],
)
other_group.add_argument(
"--dry-run",
required=False,
action="store_true",
help="Do not run the pipeline, only show what input files would be used. Note that dry run is logged at the logging.INFO level.",
)

return parser
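For a sense of how these options fit together, here is a hypothetical invocation of the parser defined above (paths and values are made up, and `_get_parser` must be in scope; after this commit the equivalent parser comes from `parser.get_parser()`):

```python
import pathlib

# Hypothetical argument list mirroring the options defined above.
args = _get_parser().parse_args(
    [
        "/data/my_bids_dataset",        # bids_dir (made-up path)
        "/data/derivatives/gradients",  # output_dir (made-up path)
        "group",                        # analysis_level
        "--subject", "01",
        "--task", "rest",
        "--parcellation", "/data/atlas/parcellation.nii.gz",  # made-up atlas path
        "--n_components", "5",
    ]
)

# BIDS options land on bids_* attributes, matching the convention in the docstring's Notes.
assert args.bids_subject == ["01"]
assert args.parcellation == pathlib.Path("/data/atlas/parcellation.nii.gz")
```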


def _raise_invalid_input(args: argparse.Namespace, files: list[str]) -> None:
"""
Check if the input arguments are valid.
@@ -235,7 +59,6 @@ def _raise_invalid_input(args: argparse.Namespace, files: list[str]) -> None:
InputError: If the output file already exists and the force flag is not set.
InputError: If no input files are found.
InputError: If input files are volume files and no parcellation is provided.
InputError: If the output format is not one of: 'hdf5', or 'json'.
"""
if (args.output_dir / "gradients.h5").exists() and not args.force:
raise exceptions.InputError(
@@ -252,8 +75,7 @@ def _raise_invalid_input(args: argparse.Namespace, files: list[str]) -> None:


def _get_bids_files(args: argparse.Namespace) -> list[str]:
"""
Get the list of input files from the BIDS directory.
"""Get the list of input files from the BIDS directory.
Args:
args: The parsed command-line arguments.
src/ba_timeseries_gradients/gradients.py: 9 changes (6 additions, 3 deletions)
@@ -26,8 +26,7 @@ def compute_gradients(
n_components: int = 10,
sparsity: float = 0.9,
) -> tuple[np.ndarray, np.ndarray]:
"""
Computes the gradients for a collection of files.
"""Computes the gradients for a collection of files.
Args:
files: A collection of file paths containing timeseries data.
@@ -39,7 +38,8 @@ sparsity: The sparsity level to use for the gradient computation.
sparsity: The sparsity level to use for the gradient computation.
Returns:
The computed gradients as a numpy array.
numpy.ndarray: The computed gradients.
numpy.ndarray: The computed lambdas.
Notes:
The gradient computation is performed using the BrainSpace package.
@@ -77,6 +77,9 @@ def _get_connectivity_matrix(
Returns:
A connectivity matrix as a numpy array.
"""
if not files:
raise ValueError("No files provided.")

if parcellation_file:
logger.debug("Loading parcellation data...")
parcellation = nib.load(parcellation_file)
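Per the Notes in `compute_gradients`, the embedding itself is delegated to BrainSpace. Below is a minimal standalone sketch of the typical BrainSpace call using the CLI defaults (random input data; whether `compute_gradients` invokes it exactly this way is not shown in this diff):

```python
import numpy as np
from brainspace.gradient import GradientMaps

# Fake symmetric "connectivity" matrix for 100 regions (random, for illustration only).
rng = np.random.default_rng(0)
m = rng.random((100, 100))
connectivity = (m + m.T) / 2

# Mirrors the CLI defaults: diffusion-map embedding, cosine kernel, 10 components, sparsity 0.9.
gm = GradientMaps(n_components=10, approach="dm", kernel="cosine", random_state=0)
gm.fit(connectivity, sparsity=0.9)

gradients = gm.gradients_  # shape (100, 10): one row per region
lambdas = gm.lambdas_      # eigenvalue-like weights for each component
```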