Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Working test of SFS in Python3.12 #1132

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .python-version
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
3.10
>=3.9,<3.11
3.12
>=3.11, < 3.13
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,10 @@
**Scilpy** mainly comprises tools and utilities to quickly work with diffusion MRI. Most of the tools are based
on or are wrappers of the [DIPY] library, and most of them will eventually be migrated to [DIPY]. Those tools implement the recommended workflows and parameters used in the lab.

The library is now built for Python 3.10 so be sure to create a virtual environnement for Python 3.10. If this version is not installed on your computer:
The library is now built for Python 3.12 so be sure to create a virtual environment for Python 3.12. If this version is not installed on your computer:
```
sudo add-apt-repository ppa:deadsnakes/ppa
sudo apt-get install python3.10 python3.10-dev python3.10-venv python3.10-minimal python3.10-tk
sudo apt-get install python3.12 python3.12-dev python3.12-venv python3.12-minimal python3.12-tk
```

Make sure your pip is up-to-date before trying to install:
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,5 @@
requires = [
"setuptools >= 64",
"Cython==3.0.*",
"numpy==1.25.*"
"numpy==1.26.*"
]
89 changes: 43 additions & 46 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,52 +1,49 @@
bids-validator==1.11.*
bctpy==0.5.*
bz2file==0.98.*
bctpy==0.6.*
bids-validator==1.14.*
bz2file==0.98
coloredlogs==15.0.*
cvxpy==1.4.*
cycler==0.11.*
#Cython==0.29.*, !=0.29.29
Cython==3.0.*
dipy==1.10.*
deepdiff==6.3.0
dmri-amico==2.0.3
dmri-commit==2.3.0
cvxpy==1.6.*
cycler==0.12.*
# Cython==3.0.*
deepdiff==8.1.*
dipy==1.11.*
dmri-amico==2.1.*
dmri-commit==2.3.*
docopt==0.6.*
dvc==3.48.*
dvc-http==2.32.*
formulaic==0.3.*
fury==0.11.*
future==0.18.*
dvc==3.59.*
formulaic==0.5.*
fury==0.13.*
future==1.0.*
GitPython==3.1.*
h5py==3.10.*
joblib==1.2.*
h5py==3.12.*
joblib==1.4.*
kiwisolver==1.4.*
matplotlib==3.6.*
matplotlib==3.10.*
nibabel==5.3.*
nilearn==0.11.*
nltk==3.9.*
numba==0.61.*
numba-kdtree==0.4.*
numpy==1.26.*
openpyxl==3.1.*
packaging==24.*
pybids==0.18.*
PyMCubes==0.1.*
nibabel==5.2.*
nilearn==0.9.*
numba==0.59.1
numba-kdtree==0.4.0
nltk==3.8.*
numpy==1.25.*
openpyxl==3.0.*
packaging == 23.2.*
Pillow==10.2.*
pybids==0.16.*
pyparsing==3.0.*
pyparsing==3.2.*
PySocks==1.7.*
pytest==7.2.*
pytest-console-scripts==1.3.*
pytest-cov==4.1.0
pytest-html==4.1.1
pytest-mock==3.10.*
python-dateutil==2.8.*
pytz==2022.6.*
requests==2.28.*
scikit-learn==1.2.*
scikit-image==0.22.*
scipy==1.11.*
six==1.16.*
spams==2.6.*
statsmodels==0.13.*
trimeshpy==0.0.4
vtk==9.2.*
pytest==8.3.*
pytest-console-scripts==1.4.*
pytest-cov==6.0.*
pytest-html==4.1.*
pytest-metadata==3.1.*
pytest-mock==3.14.*
python-dateutil==2.9.*
pytz==2024.2
requests==2.32.*
scikit-image==0.25.*
scikit-learn==1.6.*
scipy==1.15.*
six==1.17.*
statsmodels==0.14.*
trimeshpy==0.0.*
vtk==9.3.*
45 changes: 41 additions & 4 deletions scilpy/io/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
import nibabel as nib
import numpy as np
from dipy.data import SPHERE_FILES
from dipy.io.utils import is_header_compatible
from dipy.io.utils import is_header_compatible, Space, Origin
from scipy.io import loadmat
import six

Expand Down Expand Up @@ -307,6 +307,29 @@ def add_bbox_arg(parser):
'streamlines).')


def add_surface_spatial_arg(parser):
    """
    Add a 'Surface spatial options' argument group to the parser, with the
    source/destination spaces and origins used when loading/saving surfaces.

    Parameters
    ----------
    parser: argparse.ArgumentParser
        Parser to which the argument group is added.
    """
    space_choices = ['vox', 'voxmm', 'rasmm', 'lpsmm']
    origin_choices = ['corner', 'center']
    group = parser.add_argument_group(title='Surface spatial options')
    group.add_argument(
        '--source_space', default='rasmm', choices=space_choices,
        help='Source space of the input surface [%(default)s].')
    group.add_argument(
        '--destination_space', default='rasmm', choices=space_choices,
        help='Destination space of the output surface [%(default)s].')
    group.add_argument(
        '--source_origin', default='center', choices=origin_choices,
        help='Source origin of the input surface [%(default)s].')
    group.add_argument(
        '--destination_origin', default='center', choices=origin_choices,
        help='Destination origin of the output surface [%(default)s].')


def add_vtk_legacy_arg(parser):
    """
    Add the --legacy_vtk_format flag to the parser (off by default).

    Parameters
    ----------
    parser: argparse.ArgumentParser
        Parser to which the flag is added.
    """
    parser.add_argument(
        '--legacy_vtk_format', action='store_true',
        help='Save the VTK file in the legacy format.')


def add_sh_basis_args(parser, mandatory=False, input_output=False):
"""
Add spherical harmonics (SH) bases argument. For more information about
Expand Down Expand Up @@ -525,14 +548,14 @@ def add_volume_screenshot_args(parser, input_name, mandatory=True,
cmap_parsing_group.add_argument(f"--{input_name}_cmap_name",
default=default_cmap,
help=f"Colormap name for the {descriptor} "
f"image data. [%(default)s]")
f"image data. [%(default)s]")

opacity_parsing_group.add_argument(f"--{input_name}_opacity",
type=ranged_type(float, 0., 1.),
default=default_alpha,
help=f"Opacity value for the "
f"{descriptor} image data. "
f"[%(default)s]")
f"{descriptor} image data. "
f"[%(default)s]")


def add_default_screenshot_args(parser, slice_ids_mandatory=True,
Expand Down Expand Up @@ -1249,3 +1272,17 @@ def get_default_screenshotting_data(args, peaks=True):
ovl_imgs,
ovl_colors,
peaks_imgs)


def convert_stateful_str_to_enum(args):
    """
    Convert spatial arguments from string to enum for stateful operations.

    Replaces, in place, the string values of the source/destination space
    and origin attributes (as added by add_surface_spatial_arg) with the
    corresponding dipy Space / Origin enum members. Attributes that are
    absent from ``args`` are left untouched.

    Parameters
    ----------
    args: argparse.Namespace
        Parsed arguments; modified in place.
    """
    # getattr() is the idiomatic (and subclass-safe) way to do a dynamic
    # attribute lookup; args.__getattribute__(name) bypasses __getattr__.
    for attr in ('source_space', 'destination_space'):
        if hasattr(args, attr):
            setattr(args, attr, Space(getattr(args, attr)))

    for attr in ('source_origin', 'destination_origin'):
        if hasattr(args, attr):
            setattr(args, attr, Origin(getattr(args, attr)))
10 changes: 5 additions & 5 deletions scilpy/tractanalysis/tests/test_reproducibility_measures.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,15 +64,15 @@ def test_tractogram_pairwise_comparison():

# Comparing with values obtained when creating this test.
np.testing.assert_almost_equal(np.mean(acc_norm[~np.isnan(acc_norm)]),
0.6590763379712203, decimal=6)
0.659076, decimal=3)
np.testing.assert_almost_equal(np.mean(corr_norm[~np.isnan(corr_norm)]),
0.6263207793235779, decimal=6)
0.626320, decimal=3)
np.testing.assert_almost_equal(np.max(corr_norm[~np.isnan(corr_norm)]),
0.99676438850212097, decimal=6)
0.996764, decimal=3)
np.testing.assert_almost_equal(np.mean(diff_norm[~np.isnan(diff_norm)]),
0.7345049471266359, decimal=6)
0.734504, decimal=3)
np.testing.assert_almost_equal(np.mean(heatmap[~np.isnan(heatmap)]),
0.7395923591441349, decimal=6)
0.739592, decimal=3)

# Supervise the number of NaNs in each output.
# Note. Not the same because:
Expand Down
2 changes: 2 additions & 0 deletions scilpy/tractograms/streamline_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -586,6 +586,8 @@ def resample_streamlines_step_size(sft, step_size):

# Return to original space
resampled_sft.to_space(orig_space)
resampled_sft.streamlines._data = resampled_sft.streamlines._data.astype(
np.float32)

return resampled_sft

Expand Down
4 changes: 2 additions & 2 deletions scilpy/tractograms/tests/test_dps_and_dpp_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,15 +70,15 @@ def test_add_data_as_color_dpp():

def test_convert_dps_to_dpp():
fake_sft = _get_small_sft()
fake_sft.data_per_streamline['my_dps'] = [5, 6]
fake_sft.data_per_streamline['my_dps'] = np.array([5, 6])

# Converting
fake_sft = convert_dps_to_dpp(fake_sft, 'my_dps')
assert len(fake_sft.data_per_streamline.keys()) == 0
assert list(fake_sft.data_per_point.keys()) == ['my_dps']

# Add again, will fail. Allow overwrite.
fake_sft.data_per_streamline['my_dps'] = [5, 6]
fake_sft.data_per_streamline['my_dps'] = np.array([5, 6])
failed = False
try:
_ = convert_dps_to_dpp(fake_sft, 'my_dps')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ def _setup_files():

# Load sft
sft = load_tractogram(in_sft, reference)
sft.streamlines._data = sft.streamlines._data.astype(np.float32)
return sft, reference, head_tail_rois, head_tail_offset_rois, center_roi


Expand All @@ -82,7 +83,7 @@ def test_get_endpoints_density_map_five_points():
"""

sft, reference, *_ = _setup_files()

print(sft.streamlines._data.dtype)
endpoints_map = get_endpoints_density_map(
sft, point_to_select=5, to_millimeters=True)

Expand Down Expand Up @@ -326,7 +327,7 @@ def test_compute_streamline_segment():
streamline between two rois.
"""

sft, reference, _, head_tail_offset_rois, _ = _setup_files()
sft, _, _, head_tail_offset_rois, _ = _setup_files()

sft.to_vox()
sft.to_corner()
Expand Down
2 changes: 1 addition & 1 deletion scilpy/tractograms/tests/test_tractogram_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
sft = load_tractogram(in_sft, 'same')[0:4]

# Faking data_per_streamline
sft.data_per_streamline['test'] = [1] * len(sft)
sft.data_per_streamline['test'] = np.ones(len(sft))
sft.data_per_point['test2'] = [[[1, 2, 3]] * len(s) for s in sft.streamlines]


Expand Down
3 changes: 2 additions & 1 deletion scilpy/tractograms/tractogram_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,8 @@ def shuffle_streamlines(sft, rng_seed=None):
The shuffled tractogram.
"""
indices = np.arange(len(sft.streamlines))
random.shuffle(indices, random=rng_seed)
random.seed(rng_seed)
random.shuffle(indices)

streamlines = sft.streamlines[indices]
data_per_streamline = sft.data_per_streamline[indices]
Expand Down
7 changes: 4 additions & 3 deletions scilpy/utils/scilpy_bot.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,19 +5,20 @@
import re
import subprocess

import nltk
from nltk.stem import PorterStemmer

from tqdm import tqdm

SPACING_LEN = 80

stemmer = PorterStemmer()
# Import nltk lazily-guarded: it is an optional dependency of this script,
# so give a clear installation hint instead of a bare ImportError.
try:
    import nltk
    from nltk.stem import PorterStemmer
    # Corpora needed by the tokenizer/stemmer helpers below.
    nltk.download('punkt', quiet=True)
    nltk.download('wordnet', quiet=True)
except ImportError:
    # Bug fix: the two adjacent literals were joined without a space,
    # producing "...use this script.Please run...".
    raise ImportError("You must install the 'nltk' package to use this "
                      "script. Please run 'pip install nltk'.")
stemmer = PorterStemmer()

# Path to the JSON file containing script information and keywords
VOCAB_FILE_PATH = pathlib.Path(
Expand Down
Loading
Loading