Merge pull request #11 from sina-mansour/dev
Dev
sina-mansour authored Aug 7, 2023
2 parents 91d805a + 2a97bba commit d1f509d
Showing 5 changed files with 82 additions and 14 deletions.
28 changes: 28 additions & 0 deletions .github/workflows/release.yml
@@ -0,0 +1,28 @@
name: Publish Python 🐍 distribution 📦 to PyPI upon new release

on:
  release:
    types: [created]

jobs:
  build-n-publish:
    name: Build and publish Python 🐍 distributions 📦 to PyPI
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install setuptools wheel twine
      - name: Build and publish
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
        run: |
          python setup.py sdist bdist_wheel
          twine upload dist/*
3 changes: 3 additions & 0 deletions .gitignore
@@ -15,3 +15,6 @@ connectome-spatial-smoothing.sublime-workspace
# test notebooks
code/Connectome_Spatial_Smoothing/__pycache__/
notebooks/temp.ipynb

# ignore the pypi token
token.tmp
10 changes: 10 additions & 0 deletions README.md
@@ -41,6 +41,16 @@ Then, you could simply use the package in your own code after importing:

---

## Usage notes

### Verbosity

By default, CSS functions print log messages that may or may not be of interest to you. If you would like to disable these logs, simply add the following line:

`css._verbose = False`
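For instance, a minimal sketch of turning the logs off (assuming the package is imported under the alias `css`, as in the README's import instructions):

```python
from Connectome_Spatial_Smoothing import CSS as css

# Module-level switch: when False, CSS functions skip printing log messages.
css._verbose = False
```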

---

We have provided a short Jupyter notebook showcasing all the functionalities described above. You may use the following link to open [this notebook](https://github.com/sina-mansour/connectome-based-smoothing/blob/main/notebooks/example.ipynb) in an interactive Google Colab session:

[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/sina-mansour/connectome-based-smoothing/blob/main/notebooks/example.ipynb)
53 changes: 40 additions & 13 deletions code/Connectome_Spatial_Smoothing/CSS.py
@@ -29,6 +29,7 @@
_main_dir = os.path.abspath(os.path.dirname(__file__))
_sample_cifti_dscalar = os.path.join(_main_dir, 'data/templates/cifti/ones.dscalar.nii')
_glasser_cifti = os.path.join(_main_dir, 'data/templates/atlas/Glasser360.32k_fs_LR.dlabel.nii')
_verbose = True


def _join_path(*args):
@@ -53,12 +54,13 @@ def _time_str(mode='abs', base=None):


def _print_log(message, mode='info'):
-    if mode == 'info':
-        print ('{}: \033[0;32m[INFO]\033[0m {}'.format(_time_str(), message))
-    if mode == 'err':
-        print ('{}: \033[0;31m[ERROR]\033[0m {}'.format(_time_str(), message))
-        quit()
-    sys.stdout.flush()
+    if _verbose:
+        if mode == 'info':
+            print ('{}: \033[0;32m[INFO]\033[0m {}'.format(_time_str(), message))
+        if mode == 'err':
+            print ('{}: \033[0;31m[ERROR]\033[0m {}'.format(_time_str(), message))
+            quit()
+        sys.stdout.flush()


def _handle_process_with_que(que, func, args, kwds):
@@ -206,7 +208,7 @@ def parcellation_characteristic_matrix(atlas_file=_glasser_cifti, cifti_file=_sa

    label_dict = {x: i for (i, x) in enumerate(labels)}

-    parcellation_matrix = np.zeros((len(labels), cortical_vertices_count))
+    parcellation_matrix = np.zeros((len(labels), len(surface_labels)))

    for (i, x) in enumerate(surface_labels):
        parcellation_matrix[label_dict[x], i] = 1
@@ -601,11 +603,19 @@ def _get_half_incidence_matrices_from_endpoint_distances(start_dists,
                                                         end_dists,
                                                         end_indices,
                                                         node_count,
-                                                         threshold):
+                                                         threshold,
+                                                         weights=None):
    """
    Returns two half incidence matrices in a sparse format (CSR) after
    filtering the streamlines that are far (>2mm) from their closest vertex.
    """

+    if weights is None:
+        weights = np.ones(len(start_dists))
+    elif (type(weights) == str):
+        # load text file contents (this could be a sift weights file generated by mrtrix)
+        weights = np.genfromtxt(weights)
+
    # mask points that are further than the threshold from all surface coordinates
    outlier_mask = (start_dists > threshold) | (end_dists > threshold)
    _print_log('outliers located: #{} outliers ({}%, with threshold {}mm)'.format(
@@ -620,7 +630,7 @@ def _get_half_incidence_matrices_from_endpoint_distances(start_dists,
    end_dict = {}
    indices = (i for i in range(len(outlier_mask)) if not outlier_mask[i])
    for l, i in enumerate(indices):
-        start_dict[(start_indices[i], l)] = start_dict.get((start_indices[i], l), 0) + 1
+        start_dict[(start_indices[i], l)] = start_dict.get((start_indices[i], l), 0) + weights[i]
        end_dict[(end_indices[i], l)] = end_dict.get((end_indices[i], l), 0) + 1

    start_inc_mat = sparse.dok_matrix(
@@ -650,11 +660,15 @@ def _get_half_incidence_matrices_from_endpoint_distances(start_dists,
    return (start_inc_mat.tocsr(), end_inc_mat.tocsr())


-def _get_adjacency_from_half_incidence_matrices(U, V):
+def _get_adjacency_from_half_incidence_matrices(U, V, stat='sum'):
    """
    return a sparse adjacency matrix A from the two halves of the incidence matrices U & V.
    """
    A = U.dot(V.T)
+    if stat == 'mean':
+        # compute average instead of sum
+        A_div = (U != 0).astype(int).dot(((V != 0).astype(int)).T)
+        A.data = A.data / A_div.data
    return A + A.T
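As a toy illustration of the new `stat` option (not part of the package; it calls the internal helper directly), consider two streamlines that both run from node 0 to node 1, carrying weights 2.0 and 4.0 in the start half-incidence matrix:

```python
import numpy as np
from scipy import sparse

from Connectome_Spatial_Smoothing import CSS as css

# Node-by-streamline half-incidence matrices: both streamlines start at
# node 0 (with weights 2.0 and 4.0) and end at node 1 (binary entries).
U = sparse.csr_matrix(np.array([[2.0, 4.0],
                                [0.0, 0.0]]))
V = sparse.csr_matrix(np.array([[0.0, 0.0],
                                [1.0, 1.0]]))

A_sum = css._get_adjacency_from_half_incidence_matrices(U, V)
A_mean = css._get_adjacency_from_half_incidence_matrices(U, V, stat='mean')

print(A_sum[0, 1])   # 6.0: summed streamline weights
print(A_mean[0, 1])  # 3.0: mean streamline weight (6.0 divided by 2 streamlines)
```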


@@ -664,7 +678,9 @@ def map_high_resolution_structural_connectivity(track_file,
                                                warp_file=None,
                                                threshold=2,
                                                subcortex=False,
-                                                cifti_file=_sample_cifti_dscalar,):
+                                                cifti_file=_sample_cifti_dscalar,
+                                                weights=None,
+                                                stat='sum'):
    """
    Map the high-resolution structural connectivity matrix from tractography outputs.
@@ -689,6 +705,15 @@
        to determine the high-resolution structure to exclude the medial wall and potentially
        integrate subcortical volume.

+    weights: [optional] A numpy vector with the length of the number of streamlines, or a file
+        path pointing to a per-streamline weights file (such as the files generated by mrtrix's
+        tcksift2). If provided, the weights are used to adjust the contribution of every
+        streamline to the connectivity matrix.
+
+    stat: [optional, default='sum'] Either 'sum' or 'mean'. If 'sum', the values are added up
+        over all streamlines; otherwise, an average is computed over all streamline weights. A
+        combination of stat='mean' with per-streamline lengths passed as weights could be used
+        to compute a mean-distance connectivity matrix.

    Returns:

    connectome: The high-resolution structural connectome in a sparse csr format.
@@ -703,8 +728,10 @@
                warp_file,
                subcortex=subcortex,
            ),
-            threshold=threshold
-        )
+            threshold=threshold,
+            weights=weights
+        ),
+        stat=stat
+    )
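For context, a hedged usage sketch of the extended API: file paths are hypothetical, and the surface-file parameter names are collapsed out of this diff, so they are assumed here rather than confirmed by it.

```python
from Connectome_Spatial_Smoothing import CSS as css

connectome = css.map_high_resolution_structural_connectivity(
    track_file='tractography/tracks.tck',                      # hypothetical path
    left_surface_file='surfaces/L.white.32k_fs_LR.surf.gii',   # assumed parameter name
    right_surface_file='surfaces/R.white.32k_fs_LR.surf.gii',  # assumed parameter name
    weights='tractography/sift2_weights.txt',  # plain-text per-streamline weights (e.g. tcksift2 output)
    stat='sum',  # 'mean' would average the weights per connection instead of summing them
)
```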


2 changes: 1 addition & 1 deletion setup.py
@@ -7,7 +7,7 @@

setuptools.setup(
    name="Connectome_Spatial_Smoothing",
-    version="0.1.3",
+    version="0.1.4",
    author="Sina Mansour L.",
    author_email="[email protected]",
    description="Connectome Spatial Smoothing",
