Merge pull request #1004 from karanphil/bundle_fixel
[ENH] Bundle fixel analysis script
arnaudbore authored Jul 25, 2024
2 parents cfaffc4 + 8112974 commit 460dac8
Showing 4 changed files with 570 additions and 0 deletions.
147 changes: 147 additions & 0 deletions scilpy/tractanalysis/fixel_density.py
@@ -0,0 +1,147 @@
import itertools
import multiprocessing
import numpy as np

from dipy.io.streamline import load_tractogram
from scilpy.tractanalysis.grid_intersections import grid_intersections


def _fixel_density_parallel(args):
    peaks, max_theta, dps_key, bundle = args

sft = load_tractogram(bundle, 'same')
sft.to_vox()
sft.to_corner()

fixel_density_maps = np.zeros((peaks.shape[:-1]) + (5,))

min_cos_theta = np.cos(np.radians(max_theta))

all_crossed_indices = grid_intersections(sft.streamlines)
for i, crossed_indices in enumerate(all_crossed_indices):
segments = crossed_indices[1:] - crossed_indices[:-1]
seg_lengths = np.linalg.norm(segments, axis=1)

        # Remove zero-length segments to avoid numpy division-by-zero warnings
non_zero_lengths = np.nonzero(seg_lengths)[0]
segments = segments[non_zero_lengths]
seg_lengths = seg_lengths[non_zero_lengths]

        # These starting points are used to compute each segment's voxel index
seg_start = crossed_indices[non_zero_lengths]
vox_indices = (seg_start + (0.5 * segments)).astype(int)

normalized_seg = np.reshape(segments / seg_lengths[..., None], (-1, 3))

weight = 1
if dps_key:
weight = sft.data_per_streamline[dps_key][i]

for vox_idx, seg_dir in zip(vox_indices, normalized_seg):
vox_idx = tuple(vox_idx)
peaks_at_idx = peaks[vox_idx].reshape((5, 3))

cos_theta = np.abs(np.dot(seg_dir.reshape((-1, 3)),
peaks_at_idx.T))

            if (cos_theta > min_cos_theta).any():
                # Index of the fixel (peak) most aligned with the segment
                lobe_idx = np.argmax(np.squeeze(cos_theta), axis=0)
                fixel_density_maps[vox_idx][lobe_idx] += weight

return fixel_density_maps


def fixel_density(peaks, bundles, dps_key=None, max_theta=45,
nbr_processes=None):
"""Compute the fixel density map per bundle. Can use parallel processing.
Parameters
----------
peaks : np.ndarray (x, y, z, 15)
Five principal fiber orientations for each voxel.
bundles : list or np.array (N)
List of (N) paths to bundles.
dps_key : string, optional
Key to the data_per_streamline to use as weight instead of the number
of streamlines.
max_theta : int, optional
Maximum angle between streamline and peak to be associated.
nbr_processes : int, optional
The number of subprocesses to use.
Default: multiprocessing.cpu_count()
Returns
-------
fixel_density : np.ndarray (x, y, z, 5, N)
Density per fixel per bundle.
"""
nbr_processes = multiprocessing.cpu_count() \
if nbr_processes is None or nbr_processes <= 0 \
else nbr_processes

pool = multiprocessing.Pool(nbr_processes)
results = pool.map(_fixel_density_parallel,
zip(itertools.repeat(peaks),
itertools.repeat(max_theta),
itertools.repeat(dps_key),
bundles))
pool.close()
pool.join()

fixel_density = np.moveaxis(np.asarray(results), 0, -1)

return fixel_density


def maps_to_masks(maps, abs_thr, rel_thr, norm, nb_bundles):
"""Compute the fixel density masks from fixel density maps.
Parameters
----------
maps : np.ndarray (x, y, z, 5, N)
Density per fixel per bundle.
abs_thr : float
Value of density maps threshold to obtain density masks, in number of
streamlines or streamline weighting.
rel_thr : float
Value of density maps threshold to obtain density masks, as a ratio of
the normalized density. Must be between 0 and 1.
norm : string, ["fixel", "voxel"]
Way of normalizing the density maps. If fixel, will normalize the maps
per fixel, in each voxel. If voxel, will normalize the maps per voxel.
nb_bundles : int (N)
Number of bundles (N).
Returns
-------
masks : np.ndarray (x, y, z, 5, N)
Density masks per fixel per bundle.
maps : np.ndarray (x, y, z, 5, N)
Normalized density maps per fixel per bundle.
"""
    # Apply the absolute threshold on the raw density (streamline count or weight)
masks_abs = maps > abs_thr

# Normalizing the density maps per voxel or fixel
fixel_sum = np.sum(maps, axis=-1)
voxel_sum = np.sum(fixel_sum, axis=-1)
    for i in range(nb_bundles):
        if norm == "voxel":
            # Broadcast the per-voxel sum over the 5 fixels of each voxel
            maps[..., i] /= voxel_sum[..., None]
        elif norm == "fixel":
            maps[..., i] /= fixel_sum

# Apply a threshold on the normalized density
masks_rel = maps > rel_thr
# Compute the fixel density masks from the rel and abs versions
masks = masks_rel * masks_abs

return masks.astype(np.uint8), maps
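
The two functions are meant to be chained: fixel_density produces the raw per-bundle density maps, and maps_to_masks thresholds and normalizes them. A minimal usage sketch (not part of the commit; the peaks volume and bundle file names are hypothetical):

import nibabel as nib

from scilpy.tractanalysis.fixel_density import fixel_density, maps_to_masks

if __name__ == "__main__":
    # Hypothetical inputs: a (x, y, z, 15) peaks volume and two .trk bundles.
    peaks = nib.load("peaks.nii.gz").get_fdata()
    bundles = ["AF_left.trk", "AF_right.trk"]

    # Density per fixel (5 per voxel) and per bundle: shape (x, y, z, 5, 2).
    density = fixel_density(peaks, bundles, max_theta=45, nbr_processes=4)

    # Threshold and normalize per voxel. Note that maps_to_masks divides the
    # maps array in place, so pass a copy if the raw densities are still needed.
    masks, norm_maps = maps_to_masks(density.copy(), abs_thr=1, rel_thr=0.1,
                                     norm="voxel", nb_bundles=len(bundles))

The __main__ guard matters here because fixel_density spawns a multiprocessing pool, which re-imports the calling module on platforms that use the spawn start method.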
11 changes: 11 additions & 0 deletions scilpy/tractanalysis/tests/test_fixel_density.py
@@ -0,0 +1,11 @@
# -*- coding: utf-8 -*-


def test_fixel_density():
    # TODO
pass


def test_maps_to_masks():
    # TODO
pass
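
The two stubs above are placeholders. A possible shape for the maps_to_masks test, using synthetic strictly positive densities so the voxel-wise normalization is well defined (a sketch, not part of the commit):

import numpy as np

from scilpy.tractanalysis.fixel_density import maps_to_masks


def test_maps_to_masks_voxel_norm():
    # Synthetic density maps: 2x2x2 voxels, 5 fixels, 2 bundles, all > 0.
    rng = np.random.default_rng(0)
    maps = rng.random((2, 2, 2, 5, 2)) + 0.1

    masks, norm_maps = maps_to_masks(maps.copy(), abs_thr=0.0, rel_thr=0.0,
                                     norm="voxel", nb_bundles=2)

    assert masks.shape == maps.shape
    assert masks.dtype == np.uint8
    # With voxel normalization, the densities in each voxel sum to 1.
    np.testing.assert_allclose(norm_maps.sum(axis=(-2, -1)), 1.0)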
