ENH Harvard-Oxford atlas (#132)
* ENH Harvard-Oxford atlas

* auto detect preset atlases

* Help string gets the atlas list automatically as well

* ADD atlas to the Docker atlas download tool

* ADD changelog

* FIX add Harvard-Oxford to atlas download and trim down the Dockerfile

* docker will test all default atlases

* docker artefacts should have different names; the output path can be the same

* remove th0 since the coverage is too liberal and makes little sense

* can an if statement run under a job id?

* TEST job-level if statement cannot access env (hence `github.ref` and `github.ref_type` are checked inline in the workflow instead of via `env`)
htwangtw authored Apr 12, 2024
1 parent 5563f19 commit 38c3fb3
Showing 12 changed files with 75 additions and 14 deletions.
13 changes: 8 additions & 5 deletions .github/workflows/docker.yml
@@ -11,6 +11,7 @@ on:
- 'Dockerfile'
- 'requirements.txt'
- '.github/workflows/docker.yml'
- 'tools/*'
release:
types: [published]

@@ -26,8 +27,6 @@ env:
REPO_NAME: giga_connectome
DATA: /home/runner/work/giga_connectome/giga_connectome/giga_connectome/data/test_data
IMAGE: /home/runner/work/giga_connectome/giga_connectome/docker
IS_TAGGED: ${{ github.ref_type == 'tag' }}
IS_MAIN: ${{ github.ref == 'refs/heads/main' }}

jobs:
download-test-data:
@@ -70,6 +69,9 @@ jobs:
docker-run:
runs-on: ubuntu-latest
needs: [download-test-data, docker-build]
strategy:
matrix:
atlas: ['Schaefer20187Networks', 'MIST', 'DiFuMo', 'HarvardOxfordCortical', 'HarvardOxfordCorticalSymmetricSplit', 'HarvardOxfordSubcortical']
steps:
- uses: actions/checkout@v4
with:
@@ -96,13 +98,14 @@ jobs:
/outputs \
participant \
-w /work \
--atlas ${{ matrix.atlas }} \
--participant_label 1 \
--reindex-bids
- name: Upload output artifact
uses: actions/upload-artifact@v4
with:
name: connectome
name: connectome_${{ matrix.atlas }}
path: ./outputs/

docker-push:
@@ -111,6 +114,7 @@
defaults:
run:
shell: bash -el {0}
if: ${{ github.ref == 'refs/heads/main' || github.ref_type == 'tag' }}
steps:
- uses: actions/checkout@v4
with:
@@ -128,14 +132,13 @@ jobs:
- name: Load image
run: docker load -i ${{ env.IMAGE }}/image.tar
- name: Push unstable to dockerhub on tags or on main
if: ${{ env.IS_MAIN == 'true' || env.IS_TAGGED == 'true' }}
run: |
echo "Pushing unstable versions to DockerHub"
unstable="${{env.USER_NAME}}/${{env.REPO_NAME}}:unstable"
docker tag "${{env.USER_NAME}}/${{env.REPO_NAME}}" "${unstable}"
docker push "${unstable}"
- name: Push stable release to dockerhub on tags only
if: ${{ env.IS_TAGGED == 'true' }}
if: ${{ github.ref_type == 'tag' }}
run: |
echo "Pushing stable and latest versions to DockerHub for latest and ${{ github.ref_name }}"
1 change: 0 additions & 1 deletion Dockerfile
@@ -16,7 +16,6 @@ COPY [".", "/code"]

RUN pip3 install --no-cache-dir pip==24.0 && \
pip3 install --no-cache-dir --requirement requirements.txt && \
python3 -c "from templateflow.api import get; get(['MNI152NLin2009cAsym', 'MNI152NLin6Asym'])" && \
pip3 --no-cache-dir install .

ENV TEMPLATEFLOW_HOME=${TEMPLATEFLOW_HOME}
2 changes: 1 addition & 1 deletion README.md
@@ -28,7 +28,7 @@ Pull from `Dockerhub` (Recommended)

```bash
docker pull bids/giga_connectome:latest
docker run -ti --rm --read-only bids/giga_connectome --help
docker run -ti --rm bids/giga_connectome --help
```

If you want to get the bleeding-edge version of the app,
5 changes: 5 additions & 0 deletions docs/source/changes.md
@@ -6,13 +6,18 @@ Released MONTH YEAR

### New

- [ENH] Add Harvard-Oxford atlas. (@htwangtw) [#117](https://github.com/bids-apps/giga_connectome/issues/117)
- [DOCS] Improved documentation on using customised configuration files. (@htwangtw)
- [ENH] use logger instead of print statements. (@Remi-Gau)

### Fixes

- [FIX] Bump nilearn version to 0.10.2 to fix issues [#26](https://github.com/bids-apps/giga_connectome/issues/26) and [#27](https://github.com/bids-apps/giga_connectome/issues/27). (@Remi-Gau)

### Enhancements

- [ENH] Reduce the docker image size. (@htwangtw)

### Changes

- [ENH] Make output more BIDS compliant. (@Remi-Gau)
13 changes: 9 additions & 4 deletions giga_connectome/atlas.py
@@ -15,9 +15,6 @@

gc_log = gc_logger()


PRESET_ATLAS = ["DiFuMo", "MIST", "Schaefer20187Networks"]

ATLAS_CONFIG_TYPE = TypedDict(
"ATLAS_CONFIG_TYPE",
{
@@ -153,6 +150,12 @@ def resample_atlas_collection(
return resampled_atlases


def get_atlas_labels() -> List[str]:
"""Get the list of available atlas labels."""
atlas_dir = resource_filename("giga_connectome", "data/atlas")
return [p.stem for p in Path(atlas_dir).glob("*.json")]


def _check_altas_config(
atlas: str | Path | dict[str, Any]
) -> ATLAS_CONFIG_TYPE:
@@ -174,8 +177,10 @@ def _check_altas_config(
atlas configuration not containing the correct keys.
"""
# load the file first if the input is not already a dictionary
atlas_dir = resource_filename("giga_connectome", "data/atlas")
preset_atlas = [p.stem for p in Path(atlas_dir).glob("*.json")]
if isinstance(atlas, (str, Path)):
if atlas in PRESET_ATLAS:
if atlas in preset_atlas:
config_path = Path(
resource_filename(
"giga_connectome", f"data/atlas/{atlas}.json"
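With `PRESET_ATLAS` replaced by `get_atlas_labels()`, the preset list is discovered from the JSON files shipped in `giga_connectome/data/atlas`, so a new atlas only needs a config file. A minimal sketch of how the helper can be inspected (the exact output depends on the JSON files in the installed copy):

```python
from giga_connectome.atlas import get_atlas_labels

# Presets are read from giga_connectome/data/atlas/*.json at call time, so the
# three Harvard-Oxford configs added in this commit show up without any other
# code change.
print(sorted(get_atlas_labels()))
# expected to include: DiFuMo, HarvardOxfordCortical,
# HarvardOxfordCorticalSymmetricSplit, HarvardOxfordSubcortical,
# MIST, Schaefer20187Networks
```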
14 changes: 14 additions & 0 deletions giga_connectome/data/atlas/HarvardOxfordCortical.json
@@ -0,0 +1,14 @@
{
"name": "HarvardOxfordCortical",
"parameters": {
"atlas": "HOCPA",
"template": "MNI152NLin2009cAsym",
"resolution": "02",
"suffix": "dseg"
},
"desc": [
"th25",
"th50"
],
"templateflow_dir" : null
}
14 changes: 14 additions & 0 deletions giga_connectome/data/atlas/HarvardOxfordCorticalSymmetricSplit.json
@@ -0,0 +1,14 @@
{
"name": "HarvardOxfordCorticalSymmetricSplit",
"parameters": {
"atlas": "HOCPAL",
"template": "MNI152NLin2009cAsym",
"resolution": "02",
"suffix": "dseg"
},
"desc": [
"th25",
"th50"
],
"templateflow_dir" : null
}
14 changes: 14 additions & 0 deletions giga_connectome/data/atlas/HarvardOxfordSubcortical.json
@@ -0,0 +1,14 @@
{
"name": "HarvardOxfordSubcortical",
"parameters": {
"atlas": "HOSPA",
"template": "MNI152NLin2009cAsym",
"resolution": "02",
"suffix": "dseg"
},
"desc": [
"th25",
"th50"
],
"templateflow_dir" : null
}
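The three configs share one structure: `parameters` reads like a TemplateFlow query and `desc` lists the probability thresholds to fetch. A rough illustration of the query those fields imply, assuming the TemplateFlow keyword filters map one-to-one onto the JSON keys (the real lookup happens inside giga_connectome's atlas loader):

```python
import templateflow.api as tf

# Illustration only: fetch the 25%-threshold Harvard-Oxford cortical
# segmentation described by HarvardOxfordCortical.json.
dseg = tf.get(
    "MNI152NLin2009cAsym",  # "template"
    atlas="HOCPA",          # "atlas"
    resolution="02",        # "resolution"
    desc="th25",            # one entry of "desc"
    suffix="dseg",          # "suffix"
)
print(dseg)
```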
5 changes: 4 additions & 1 deletion giga_connectome/run.py
@@ -6,6 +6,9 @@

from giga_connectome import __version__
from giga_connectome.workflow import workflow
from giga_connectome.atlas import get_atlas_labels

preset_atlas = get_atlas_labels()


def global_parser() -> argparse.ArgumentParser:
@@ -58,7 +61,7 @@ def global_parser() -> argparse.ArgumentParser:
parser.add_argument(
"--atlas",
help="The choice of atlas for time series extraction. Default atlas "
"choices are: 'Schaefer20187Networks, 'MIST', 'DiFuMo'. User can pass "
f"choices are: {preset_atlas}. User can pass "
"a path to a json file containing configuration for their own choice "
"of atlas. The default is 'Schaefer20187Networks'.",
default="Schaefer20187Networks",
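Because the `--atlas` help text is now an f-string over `get_atlas_labels()`, the CLI documentation stays in sync with the bundled configs. A quick check, assuming the package is installed:

```python
from giga_connectome.run import global_parser

# The --atlas entry in the printed usage should now enumerate every preset
# found in giga_connectome/data/atlas, including the Harvard-Oxford ones.
global_parser().print_help()
```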
2 changes: 2 additions & 0 deletions giga_connectome/tests/test_atlas.py
@@ -4,3 +4,5 @@
def test_load_atlas_setting():
atlas_config = load_atlas_setting("Schaefer20187Networks")
assert atlas_config["name"] == "Schaefer20187Networks"
atlas_config = load_atlas_setting("HarvardOxfordCortical")
assert atlas_config["name"] == "HarvardOxfordCortical"
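The test covers two presets; a hypothetical follow-up (not part of this commit) could parametrise over everything `get_atlas_labels()` returns, assuming `load_atlas_setting` is importable from `giga_connectome.atlas` as the existing test implies and each config's `name` matches its file stem:

```python
import pytest

from giga_connectome.atlas import get_atlas_labels, load_atlas_setting


# Hypothetical extension: every bundled preset should load and report a "name"
# equal to its JSON file stem.
@pytest.mark.parametrize("label", get_atlas_labels())
def test_load_every_preset(label):
    assert load_atlas_setting(label)["name"] == label
```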
4 changes: 3 additions & 1 deletion tools/download_templates.py
@@ -18,11 +18,13 @@

def fetch_tpl_atlas() -> None:
"""Download datasets from templateflow."""
atlases = ["Schaefer2018", "DiFuMo"]
atlases = ["Schaefer2018", "DiFuMo", "HOSPA", "HOCPA", "HOCPAL"]
for atlas in atlases:
tf_path = tf.api.get("MNI152NLin2009cAsym", atlas=atlas)
if isinstance(tf_path, list) and len(tf_path) > 0:
gc_log.info(f"{atlas} exists.")
else:
gc_log.error(f"{atlas} does not exist.")
# download MNI grey matter template
tf.api.get("MNI152NLin2009cAsym", label="GM")

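The download tool now pre-fetches the three Harvard-Oxford segmentations alongside Schaefer and DiFuMo. A small sketch for spot-checking the fetch outside the Docker build, using the same TemplateFlow call as the tool (assumed to run with network access and a writable `TEMPLATEFLOW_HOME`):

```python
import templateflow.api as tf

# Check that the thresholded segmentations referenced by the new atlas
# configs resolve to files in MNI152NLin2009cAsym space.
for atlas in ("HOSPA", "HOCPA", "HOCPAL"):
    for desc in ("th25", "th50"):
        files = tf.get("MNI152NLin2009cAsym", atlas=atlas, desc=desc, suffix="dseg")
        print(atlas, desc, "ok" if files else "missing")
```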
2 changes: 1 addition & 1 deletion tools/mist2templateflow
Submodule mist2templateflow updated 0 files
