From b2a658c0f02d1a4ef0982f56ed8fd3c14bc46b7c Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Mon, 8 Jan 2024 21:25:39 +0000 Subject: [PATCH 01/41] add workflow --- .github/workflows/code_linting.yml | 70 ++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 .github/workflows/code_linting.yml diff --git a/.github/workflows/code_linting.yml b/.github/workflows/code_linting.yml new file mode 100644 index 000000000..ad80f9f83 --- /dev/null +++ b/.github/workflows/code_linting.yml @@ -0,0 +1,70 @@ +name: Code Linting +run-name: Code Linting (automated) +on: + issue_comment: + types: [created] + +jobs: + fix-linting: + # Only run if comment is on a PR with the main repo, and if it contains the magic keywords + if: > + contains(github.event.comment.html_url, '/pull/') && + contains(github.event.comment.body, '@nf-scil-bot fix linting') && + github.repository == 'scilus/nf-scil' + runs-on: ubuntu-latest + steps: + # Use the @nf-scil-bot token to check out so we can push later + - uses: actions/checkout@v4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + # indication that the linting is being fixed + - name: React on comment + uses: peter-evans/create-or-update-comment@v3 + with: + comment-id: ${{ github.event.comment.id }} + reactions: eyes + + # Action runs on the issue comment, so we don't get the PR by default + # Use the gh cli to check out the PR + - name: Checkout Pull Request + run: gh pr checkout ${{ github.event.issue.number }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - uses: actions/setup-node@v4 + + - name: Install Prettier + run: npm install -g prettier @prettier/plugin-php + + # Check that we actually need to fix something + - name: Run 'prettier --check' + id: prettier_status + run: | + if prettier --check ${GITHUB_WORKSPACE}; then + echo "::set-output name=result::pass" + else + echo "::set-output name=result::fail" + fi + - name: Run 'prettier --write' + if: steps.prettier_status.outputs.result == 'fail' + run: prettier --write ${GITHUB_WORKSPACE} + + - name: Post nothing-to-do comment + if: steps.prettier_status.outputs.result == 'pass' + uses: peter-evans/create-or-update-comment@v3 + with: + issue-number: ${{ github.event.issue.number }} + body: | + Nothing for me to do here! :shrug: + This is probably because the linting errors come from `nf-core lint` and have to be fixed manually (or with `nf-core lint --fix`). + - name: Commit & push changes + if: steps.prettier_status.outputs.result == 'fail' + run: | + git config user.email "scil@nf-sc.il" + git config user.name "nf-scil-bot" + git config push.default upstream + git add . 
+ git status + git commit -m "[automated] Fix linting with Prettier" + git push \ No newline at end of file From dfc4e9fc92083c68f4b3309b905a80c9364eddca Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 9 Jan 2024 04:25:07 +0000 Subject: [PATCH 02/41] add schema for metadata yml --- modules/meta-schema.json | 186 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 186 insertions(+) create mode 100644 modules/meta-schema.json diff --git a/modules/meta-schema.json b/modules/meta-schema.json new file mode 100644 index 000000000..0b855d76d --- /dev/null +++ b/modules/meta-schema.json @@ -0,0 +1,186 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "title": "Meta yaml", + "description": "Validate the meta yaml file for an nf-core module", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name of the module" + }, + "description": { + "type": "string", + "description": "Description of the module" + }, + "keywords": { + "type": "array", + "description": "Keywords for the module", + "items": { + "type": "string" + }, + "minItems": 3 + }, + "authors": { + "type": "array", + "description": "Authors of the module", + "items": { + "type": "string" + } + }, + "maintainers": { + "type": "array", + "description": "Maintainers of the module", + "items": { + "type": "string" + } + }, + "input": { + "type": "array", + "description": "Input channels for the module", + "items": { + "type": "object", + "patternProperties": { + ".*": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Type of the input channel", + "enum": ["map", "file", "directory", "string", "integer", "float", "boolean", "list"] + }, + "description": { + "type": "string", + "description": "Description of the input channel" + }, + "pattern": { + "type": "string", + "description": "Pattern of the input channel, given in Java glob syntax" + }, + "default": { + "type": ["string", "number", "boolean", "array", "object"], + "description": "Default value for the input channel" + }, + "enum": { + "type": "array", + "description": "List of allowed values for the input channel", + "items": { + "type": ["string", "number", "boolean", "array", "object"] + }, + "uniqueItems": true + } + }, + "required": ["type", "description"] + } + } + } + }, + "output": { + "type": "array", + "description": "Output channels for the module", + "items": { + "type": "object", + "patternProperties": { + ".*": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Type of the output channel", + "enum": ["map", "file", "directory", "string", "integer", "float", "boolean", "list"] + }, + "description": { + "type": "string", + "description": "Description of the output channel" + }, + "pattern": { + "type": "string", + "description": "Pattern of the input channel, given in Java glob syntax" + }, + "enum": { + "type": "array", + "description": "List of allowed values for the output channel", + "items": { + "type": ["string", "number", "boolean", "array", "object"] + }, + "uniqueItems": true + } + }, + "required": ["type", "description"] + } + } + } + }, + "tools": { + "type": "array", + "description": "Tools used by the module", + "items": { + "type": "object", + "patternProperties": { + ".*": { + "type": "object", + "properties": { + "description": { + "type": "string", + "description": "Description of the output channel" + }, + "homepage": { + "type": "string", + "description": "Homepage of the tool", + "pattern": "^(http|https)://.*$" + }, + 
"documentation": { + "type": "string", + "description": "Documentation of the tool", + "pattern": "^(http|https|ftp)://.*$" + }, + "tool_dev_url": { + "type": "string", + "description": "URL of the development version of the tool's documentation", + "pattern": "^(http|https)://.*$" + }, + "doi": { + "description": "DOI of the tool", + "anyOf": [ + { + "type": "string", + "pattern": "^10\\.\\d{4,9}\\/[^,]+$" + }, + { + "type": "string", + "enum": ["no DOI available"] + } + ] + }, + "licence": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Licence of the tool", + "minItems": 1, + "uniqueItems": true, + "message": "Licence must be an array of one or more entries, e.g. [\"MIT\"]" + } + }, + "required": ["description"], + "anyOf": [ + { + "required": ["homepage"] + }, + { + "required": ["documentation"] + }, + { + "required": ["tool_dev_url"] + }, + { + "required": ["doi"] + } + ] + } + } + } + } + }, + "required": ["name", "description", "keywords", "authors", "output", "tools"] +} From e7dc8cd607d8f81c6bc3b9701f1cdef299615295 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Mon, 20 Nov 2023 13:38:25 -0500 Subject: [PATCH 03/41] add linting, sync some files with nf-core. try better container setup --- .devcontainer/devcontainer.json | 37 +++++++++++++++++++++++++++++++- .devcontainer/setup_container.sh | 8 ++++--- .editorconfig | 12 +++++++++++ .gitignore | 2 ++ .pre-commit-config.yaml | 13 +++++++++++ .prettierignore | 7 ++++++ .prettierrc.yml | 1 + .requirements.nf-core | 8 +++++++ poetry.toml | 2 ++ pytest.ini | 6 ++++++ sync-nf-core.sh | 30 ++++++++++++++++++++++++++ 11 files changed, 122 insertions(+), 4 deletions(-) create mode 100644 .editorconfig create mode 100644 .pre-commit-config.yaml create mode 100644 .prettierignore create mode 100644 .prettierrc.yml create mode 100644 .requirements.nf-core create mode 100644 poetry.toml create mode 100644 pytest.ini create mode 100644 sync-nf-core.sh diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 4196c8914..01903b797 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -16,8 +16,43 @@ }, "customizations": { "vscode": { + "settings": { + "pythonIndent.keepHangingBracketOnLine": true, + "pythonIndent.trimLinesWithOnlyWhitespace": true, + "python.createEnvironment.trigger": "off", + "python.languageServer": "Pylance", + "python.missingPackage.severity": "Warning", + "python.poetryPath": "/root/.local/bin/poetry", + "python.terminal.activateEnvInCurrentTerminal": true, + "python.terminal.focusAfterLaunch": true, + "python.testing.promptToConfigure": false, + "python.analysis.autoFormatStrings": true, + "python.analysis.autoImportCompletions": true, + "python.analysis.completeFunctionParens": true, + "python.analysis.exclude": [ + "${workspaceFolder}/.dvc", + "${workspaceFolder}/.pytest_cache", + "${workspaceFolder}/.test_data", + "${workspaceFolder}/.vscode", + "${workspaceFolder}/nf-scil-extensions", + "**/__pycache__", + "${workspaceFolder}/.git" + ], + "python.analysis.ignore": [ + "${workspaceFolder}/.dvc", + "${workspaceFolder}/.pytest_cache", + "${workspaceFolder}/.test_data", + "${workspaceFolder}/.vscode", + "${workspaceFolder}/nf-scil-extensions", + "**/__pycache__", + "${workspaceFolder}/.git" + ], + "python.analysis.importFormat": "relative", + "python.analysis.logLevel": "Warning" + }, "extensions": [ - "AlexVCaron.nf-scil-extensions" + "AlexVCaron.nf-scil-extensions", + "ms-python.autopep8" ] } }, diff --git 
a/.devcontainer/setup_container.sh b/.devcontainer/setup_container.sh index 4ad1abb16..5fbde2154 100644 --- a/.devcontainer/setup_container.sh +++ b/.devcontainer/setup_container.sh @@ -1,6 +1,8 @@ #!/usr/bin/env bash -poetry install +mkdir /nf-scil-poetry-venv +ln -s /nf-scil-poetry-venv .venv +#poetry install --no-root +#echo "export PROFILE=docker" >> ~/.bashrc -echo "poetry shell" >> ~/.bashrc -echo "export PROFILE=docker" >> ~/.bashrc +# diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..014c2383b --- /dev/null +++ b/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true +indent_size = 4 +indent_style = space + +[*.{md,yml,yaml,html,css,scss,js,cff}] +indent_size = 2 diff --git a/.gitignore b/.gitignore index 6ca56f833..9c575d5ed 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,6 @@ +.pytest_cache/ .work/ +.venv/ .nextflow* *.pyc *.vsix diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..b7aeeb5bc --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,13 @@ +repos: + - repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + - repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v2.7.1" + hooks: + - id: prettier diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 000000000..4cd77bb4e --- /dev/null +++ b/.prettierignore @@ -0,0 +1,7 @@ +email_template.html +adaptivecard.json +slackreport.json +docs/api/_build +testing +nf_core/module-template/modules/meta.yml +nf_core/module-template/tests/test.yml diff --git a/.prettierrc.yml b/.prettierrc.yml new file mode 100644 index 000000000..c81f9a766 --- /dev/null +++ b/.prettierrc.yml @@ -0,0 +1 @@ +printWidth: 120 diff --git a/.requirements.nf-core b/.requirements.nf-core new file mode 100644 index 000000000..360f6ff87 --- /dev/null +++ b/.requirements.nf-core @@ -0,0 +1,8 @@ +black +isort +myst_parser +pytest-cov +pytest-datafiles +responses +Sphinx +sphinx-rtd-theme diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 000000000..ab1033bd3 --- /dev/null +++ b/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 000000000..652bdf8e5 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,6 @@ +[pytest] +filterwarnings = + ignore::pytest.PytestRemovedIn8Warning:_pytest.nodes:140 +testpaths = + tests +python_files = test_*.py diff --git a/sync-nf-core.sh b/sync-nf-core.sh new file mode 100644 index 000000000..fbb40534b --- /dev/null +++ b/sync-nf-core.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash + +nfcore_ver="$(poetry show nf-core | sed -n 's/\s*version\s\+:\s\+\([0-9.]\+\).*/\1/p')" +echo "version : $nfcore_ver" + +wget -O /workspaces/nf-scil/.pre-commit-config.yaml \ + https://github.com/nf-core/tools/raw/$nfcore_ver/.pre-commit-config.yaml + +wget -O /workspaces/nf-scil/.editorconfig \ + https://github.com/nf-core/tools/raw/$nfcore_ver/.editorconfig + +wget -O /workspaces/nf-scil/.prettierignore \ + https://github.com/nf-core/tools/raw/$nfcore_ver/.prettierignore + +wget -O /workspaces/nf-scil/.prettierrc.yml \ + https://github.com/nf-core/tools/raw/$nfcore_ver/.prettierrc.yml + +wget -O /workspaces/nf-scil/pytest.ini \ + https://github.com/nf-core/tools/raw/$nfcore_ver/pytest.ini + +wget -O /workspaces/nf-scil/.requirements.nf-core.new \ + 
https://github.com/nf-core/tools/raw/$nfcore_ver/requirements-dev.txt + +touch .requirements.nf-core +cat .requirements.nf-core | xargs poetry remove +cat .requirements.nf-core.new | xargs poetry add +mv .requirements.nf-core.new .requirements.nf-core +poetry lock + +pre-commit install --install-hooks From dedcb56aef11c0a3af73c83eccf516d4a954b4f7 Mon Sep 17 00:00:00 2001 From: Alex Valcourt Caron Date: Wed, 29 Nov 2023 10:09:54 -0500 Subject: [PATCH 04/41] bind venv in container to volume (speeds up container uptime) --- .devcontainer/devcontainer.json | 129 ++++++++++++++++--------------- .devcontainer/setup_container.sh | 7 +- 2 files changed, 70 insertions(+), 66 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 01903b797..dc4088a0c 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,61 +1,68 @@ -{ - "name": "NF-SCIL development container", - "build": { "dockerfile": "Dockerfile" }, - "forwardPorts": [3000], - "onCreateCommand": "bash .devcontainer/setup_container.sh", - "features": { - "ghcr.io/devcontainers/features/git:1": {}, - "ghcr.io/devcontainers/features/git-lfs:1": {}, - "ghcr.io/devcontainers/features/github-cli:1": {}, - "ghcr.io/devcontainers/features/docker-in-docker:2": {}, - "ghcr.io/devcontainers-contrib/features/apt-get-packages:1": {}, - "ghcr.io/robsyme/features/nextflow:1": {}, - "ghcr.io/devcontainers-contrib/features/curl-apt-get:1": {}, - "ghcr.io/devcontainers-contrib/features/tmux-apt-get:1": {}, - "ghcr.io/devcontainers-contrib/features/wget-apt-get:1": {} - }, - "customizations": { - "vscode": { - "settings": { - "pythonIndent.keepHangingBracketOnLine": true, - "pythonIndent.trimLinesWithOnlyWhitespace": true, - "python.createEnvironment.trigger": "off", - "python.languageServer": "Pylance", - "python.missingPackage.severity": "Warning", - "python.poetryPath": "/root/.local/bin/poetry", - "python.terminal.activateEnvInCurrentTerminal": true, - "python.terminal.focusAfterLaunch": true, - "python.testing.promptToConfigure": false, - "python.analysis.autoFormatStrings": true, - "python.analysis.autoImportCompletions": true, - "python.analysis.completeFunctionParens": true, - "python.analysis.exclude": [ - "${workspaceFolder}/.dvc", - "${workspaceFolder}/.pytest_cache", - "${workspaceFolder}/.test_data", - "${workspaceFolder}/.vscode", - "${workspaceFolder}/nf-scil-extensions", - "**/__pycache__", - "${workspaceFolder}/.git" - ], - "python.analysis.ignore": [ - "${workspaceFolder}/.dvc", - "${workspaceFolder}/.pytest_cache", - "${workspaceFolder}/.test_data", - "${workspaceFolder}/.vscode", - "${workspaceFolder}/nf-scil-extensions", - "**/__pycache__", - "${workspaceFolder}/.git" - ], - "python.analysis.importFormat": "relative", - "python.analysis.logLevel": "Warning" - }, - "extensions": [ - "AlexVCaron.nf-scil-extensions", - "ms-python.autopep8" - ] - } - }, - "init": true, - "privileged": true -} +{ + "name": "NF-SCIL development container", + "build": { "dockerfile": "Dockerfile" }, + "forwardPorts": [3000], + "onCreateCommand": "bash .devcontainer/setup_container.sh", + "features": { + "ghcr.io/devcontainers/features/git:1": {}, + "ghcr.io/devcontainers/features/git-lfs:1": {}, + "ghcr.io/devcontainers/features/github-cli:1": {}, + "ghcr.io/devcontainers/features/docker-in-docker:2": {}, + "ghcr.io/devcontainers-contrib/features/apt-get-packages:1": {}, + "ghcr.io/robsyme/features/nextflow:1": {}, + "ghcr.io/devcontainers-contrib/features/curl-apt-get:1": {}, + 
"ghcr.io/devcontainers-contrib/features/tmux-apt-get:1": {}, + "ghcr.io/devcontainers-contrib/features/wget-apt-get:1": {} + }, + "mounts": [ + { + "source": "nf-scil-venv", + "target": "/workspaces/nf-scil/.venv", + "type": "volume" + } + ], + "customizations": { + "vscode": { + "settings": { + "pythonIndent.keepHangingBracketOnLine": true, + "pythonIndent.trimLinesWithOnlyWhitespace": true, + "python.createEnvironment.trigger": "off", + "python.languageServer": "Pylance", + "python.missingPackage.severity": "Warning", + "python.poetryPath": "/root/.local/bin/poetry", + "python.terminal.activateEnvInCurrentTerminal": true, + "python.terminal.focusAfterLaunch": true, + "python.testing.promptToConfigure": false, + "python.analysis.autoFormatStrings": true, + "python.analysis.autoImportCompletions": true, + "python.analysis.completeFunctionParens": true, + "python.analysis.exclude": [ + "${workspaceFolder}/.dvc", + "${workspaceFolder}/.pytest_cache", + "${workspaceFolder}/.test_data", + "${workspaceFolder}/.vscode", + "${workspaceFolder}/nf-scil-extensions", + "**/__pycache__", + "${workspaceFolder}/.git" + ], + "python.analysis.ignore": [ + "${workspaceFolder}/.dvc", + "${workspaceFolder}/.pytest_cache", + "${workspaceFolder}/.test_data", + "${workspaceFolder}/.vscode", + "${workspaceFolder}/nf-scil-extensions", + "**/__pycache__", + "${workspaceFolder}/.git" + ], + "python.analysis.importFormat": "relative", + "python.analysis.logLevel": "Warning" + }, + "extensions": [ + "AlexVCaron.nf-scil-extensions", + "ms-python.autopep8" + ] + } + }, + "init": true, + "privileged": true +} diff --git a/.devcontainer/setup_container.sh b/.devcontainer/setup_container.sh index 5fbde2154..63986b60b 100644 --- a/.devcontainer/setup_container.sh +++ b/.devcontainer/setup_container.sh @@ -1,8 +1,5 @@ #!/usr/bin/env bash -mkdir /nf-scil-poetry-venv -ln -s /nf-scil-poetry-venv .venv -#poetry install --no-root -#echo "export PROFILE=docker" >> ~/.bashrc +poetry install --no-root +echo "export PROFILE=docker" >> ~/.bashrc -# From 78e6c5c157e3755eb3b97ab1617827727826efdb Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Wed, 29 Nov 2023 18:08:42 +0000 Subject: [PATCH 05/41] add git settings to workspace --- .vscode/settings.json | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 5ec947fe8..05944fcdb 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,4 +1,11 @@ { + "git.allowForcePush": true, + "git.confirmForcePush": true, + "git.useForcePushIfIncludes": true, + "git.useForcePushWithLease": true, + "git.branchProtection": ["main"], + "git.branchProtectionPrompt": "alwaysPrompt", + "github.branchProtection": true, "cSpell.words": [ "accumbens", "altnp", @@ -86,4 +93,4 @@ "wmparc", "xform" ] -} \ No newline at end of file +} From e6e3e5ad433ac60015ac92f10f7896d5120965a2 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Wed, 29 Nov 2023 18:12:00 +0000 Subject: [PATCH 06/41] remove precommit hook. 
It's a bit hard to use and not fun when it blocks for no reasons --- .pre-commit-config.yaml | 13 ------------- sync-nf-core.sh | 5 ----- 2 files changed, 18 deletions(-) delete mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml deleted file mode 100644 index b7aeeb5bc..000000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,13 +0,0 @@ -repos: - - repo: https://github.com/psf/black - rev: 23.1.0 - hooks: - - id: black - - repo: https://github.com/pycqa/isort - rev: 5.12.0 - hooks: - - id: isort - - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v2.7.1" - hooks: - - id: prettier diff --git a/sync-nf-core.sh b/sync-nf-core.sh index fbb40534b..6ecf36ed4 100644 --- a/sync-nf-core.sh +++ b/sync-nf-core.sh @@ -3,9 +3,6 @@ nfcore_ver="$(poetry show nf-core | sed -n 's/\s*version\s\+:\s\+\([0-9.]\+\).*/\1/p')" echo "version : $nfcore_ver" -wget -O /workspaces/nf-scil/.pre-commit-config.yaml \ - https://github.com/nf-core/tools/raw/$nfcore_ver/.pre-commit-config.yaml - wget -O /workspaces/nf-scil/.editorconfig \ https://github.com/nf-core/tools/raw/$nfcore_ver/.editorconfig @@ -26,5 +23,3 @@ cat .requirements.nf-core | xargs poetry remove cat .requirements.nf-core.new | xargs poetry add mv .requirements.nf-core.new .requirements.nf-core poetry lock - -pre-commit install --install-hooks From ed789c3eb3a569457d8b78893223cc642a889e4b Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Wed, 29 Nov 2023 18:24:19 +0000 Subject: [PATCH 07/41] Update with nf-core. regenerate lock file. --- poetry.lock | 497 ++++++++++++++++++++++++++++++++++++++++++++++-- pyproject.toml | 70 ++++--- run_tests.sh | 0 sync-nf-core.sh | 0 4 files changed, 519 insertions(+), 48 deletions(-) mode change 100644 => 100755 run_tests.sh mode change 100644 => 100755 sync-nf-core.sh diff --git a/poetry.lock b/poetry.lock index f9600f013..1f547f3ba 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,15 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. 
+ +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] [[package]] name = "attmap" @@ -32,6 +43,66 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["attrs[tests-no-zope]", "zope-interface"] tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +[[package]] +name = "babel" +version = "2.13.1" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, + {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} +setuptools = {version = "*", markers = "python_version >= \"3.12\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "black" +version = "23.11.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, + {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, + {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, + {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, + {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, + {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, + {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, + {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, + {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, + {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, + {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, + {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, + {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, + {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, + {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, + {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, + {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, + {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "cattrs" version = "23.2.2" @@ -203,6 +274,73 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.3.2" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = 
"coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "distlib" version = "0.3.7" @@ -214,6 +352,17 @@ files = [ {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, ] +[[package]] +name = "docutils" +version = "0.20.1" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] + [[package]] name = "eido" version = "0.2.2" @@ -400,13 +549,24 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.4" +version = "3.6" 
description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] [[package]] @@ -457,6 +617,23 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + [[package]] name = "jinja2" version = "3.1.2" @@ -634,6 +811,25 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "mdit-py-plugins" +version = "0.4.0" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"}, + {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "mdurl" version = "0.1.2" @@ -645,6 +841,43 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "myst-parser" +version = "2.0.0" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," +optional = false +python-versions = ">=3.8" +files = [ + {file = "myst_parser-2.0.0-py3-none-any.whl", hash = "sha256:7c36344ae39c8e740dad7fdabf5aa6fc4897a813083c6cc9990044eb93656b14"}, + {file = "myst_parser-2.0.0.tar.gz", hash = "sha256:ea929a67a6a0b1683cdbe19b8d2e724cd7643f8aa3e7bb18dd65beac3483bead"}, +] + +[package.dependencies] +docutils = ">=0.16,<0.21" +jinja2 = "*" +markdown-it-py = ">=3.0,<4.0" +mdit-py-plugins = ">=0.4,<1.0" +pyyaml = "*" +sphinx = ">=6,<8" + +[package.extras] +code-style = ["pre-commit (>=3.0,<4.0)"] +linkify = ["linkify-it-py (>=2.0,<3.0)"] +rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.8.2,<0.9.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"] + [[package]] name = "nf-core" version = "2.10" @@ -850,6 +1083,17 @@ sql-other = ["SQLAlchemy (>=1.4.16)"] test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] xml = ["lxml (>=4.6.3)"] +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + [[package]] name = "peppy" version = "0.35.7" @@ -1205,6 +1449,38 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-datafiles" +version = "3.0.0" +description = "py.test plugin to create a 'tmp_path' containing predefined files/directories." 
+optional = false +python-versions = "*" +files = [ + {file = "pytest-datafiles-3.0.0.tar.gz", hash = "sha256:a70c4c66a36d1cdcfc095607f04eee66eaef3fa64cbb62d60c47ce169901d1d4"}, + {file = "pytest_datafiles-3.0.0-py2.py3-none-any.whl", hash = "sha256:2176e10d3f6e76f358925a897e21e2bcc5a0170b92fac4e66ed055eaa2ca6a22"}, +] + +[package.dependencies] +pytest = ">=3.6" + [[package]] name = "pytest-workflow" version = "2.0.1" @@ -1378,20 +1654,20 @@ yacman = ">=0.8.3" [[package]] name = "requests" -version = "2.28.2" +version = "2.31.0" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -1427,6 +1703,25 @@ redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] yaml = ["pyyaml (>=5.4)"] +[[package]] +name = "responses" +version = "0.24.1" +description = "A utility library for mocking out the `requests` Python library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "responses-0.24.1-py3-none-any.whl", hash = "sha256:a2b43f4c08bfb9c9bd242568328c65a34b318741d3fab884ac843c5ceeb543f9"}, + {file = "responses-0.24.1.tar.gz", hash = "sha256:b127c6ca3f8df0eb9cc82fd93109a3007a86acb24871834c47b77765152ecf8c"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] + [[package]] name = "rich" version = "13.7.0" @@ -1611,6 +1906,174 @@ files = [ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sphinx" +version = "7.1.0" +description = "Python documentation generator" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx-7.1.0-py3-none-any.whl", hash = "sha256:9bdfb5a2b28351d4fdf40a63cd006dbad727f793b243e669fc950d7116c634af"}, + {file = "sphinx-7.1.0.tar.gz", hash = "sha256:8f336d0221c3beb23006b3164ed1d46db9cebcce9cb41cdb9c5ecd4bcc509be0"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18.1,<0.21" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.13" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] + +[[package]] +name = "sphinx-rtd-theme" +version = "2.0.0" +description = "Read the Docs theme for Sphinx" +optional = false +python-versions = ">=3.6" +files = [ + {file = "sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586"}, + {file = "sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"}, +] + +[package.dependencies] +docutils = "<0.21" +sphinx = ">=5,<8" +sphinxcontrib-jquery = ">=4,<5" + +[package.extras] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +description = "Extension to include jQuery on newer Sphinx releases" +optional = false +python-versions = ">=2.7" +files = [ + {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, + {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, +] + +[package.dependencies] +Sphinx = ">=1.8" + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + [[package]] name = "sqlalchemy" version = "2.0.23" @@ -1807,19 +2270,19 @@ six = "*" [[package]] name = "urllib3" -version = "1.26.18" +version = "2.1.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.8" files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" @@ -1888,4 +2351,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "<3.13,>=3.8" -content-hash = "007c386ee5f13d0ede5f0159dfa44fd89b1a0eaeeccfdf1359e33fc81fafac74" +content-hash = "a1b2f44ab9bd5171a0f0f573a700bdbd011e557dfd43548f07cf2f981574ed92" diff --git a/pyproject.toml b/pyproject.toml index f54aae33f..ea6f92a4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,31 +1,39 @@ -[build-system] - -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" - -[tool.poetry] - -name = "nf-scil-tools" -version = "0.1.0" -description = "Tools for the NF-SCIL project" -license = "MIT" -readme = "README.md" - -authors = ["The SCIL developers"] - -maintainers = [ - "alex valcourt caron ", - "arnaud boré " -] - -homepage = "https://scil.usherbrooke.ca/" - -repository = "https://github.com/scilus/nf-scil.git" - -[tool.poetry.dependencies] - -python = "<3.13,>=3.8" -nf-core = "^2.0.0" -nibabel = "^5.1.0" -numpy = "^1.24" -requests = "2.28.*" +[build-system] + +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] + +name = "nf-scil-tools" +version = "0.1.0" +description = "Tools for the NF-SCIL project" +license = "MIT" +readme = "README.md" + +authors = ["The SCIL developers"] + +maintainers = [ + "alex valcourt caron ", + "arnaud boré " +] + +homepage = "https://scil.usherbrooke.ca/" + +repository = "https://github.com/scilus/nf-scil.git" + +[tool.poetry.dependencies] + +python = "<3.13,>=3.8" +nf-core = "^2.0.0" +nibabel = "^5.1.0" +numpy = "^1.24" +requests = "^2.28" +sphinx = "<=7.1.0" +black = "^23.11.0" +isort = "^5.12.0" +myst-parser = "^2.0.0" +pytest-cov = "^4.1.0" +pytest-datafiles = "^3.0.0" +responses = "^0.24.1" +sphinx-rtd-theme = "^2.0.0" diff --git a/run_tests.sh b/run_tests.sh old mode 100644 new mode 100755 diff --git a/sync-nf-core.sh b/sync-nf-core.sh old mode 100644 new mode 100755 From 649646f71b1438bd2085b14bddb87f5cbf848c5e Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Wed, 29 Nov 2023 18:29:11 +0000 Subject: [PATCH 08/41] move vscode settings so they have the widest range of application possible --- .devcontainer/devcontainer.json | 12 +----------- .vscode/settings.json | 12 ++++++++++++ 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index dc4088a0c..04b07e061 100644 --- 
a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -27,15 +27,7 @@ "pythonIndent.keepHangingBracketOnLine": true, "pythonIndent.trimLinesWithOnlyWhitespace": true, "python.createEnvironment.trigger": "off", - "python.languageServer": "Pylance", - "python.missingPackage.severity": "Warning", "python.poetryPath": "/root/.local/bin/poetry", - "python.terminal.activateEnvInCurrentTerminal": true, - "python.terminal.focusAfterLaunch": true, - "python.testing.promptToConfigure": false, - "python.analysis.autoFormatStrings": true, - "python.analysis.autoImportCompletions": true, - "python.analysis.completeFunctionParens": true, "python.analysis.exclude": [ "${workspaceFolder}/.dvc", "${workspaceFolder}/.pytest_cache", @@ -53,9 +45,7 @@ "${workspaceFolder}/nf-scil-extensions", "**/__pycache__", "${workspaceFolder}/.git" - ], - "python.analysis.importFormat": "relative", - "python.analysis.logLevel": "Warning" + ] }, "extensions": [ "AlexVCaron.nf-scil-extensions", diff --git a/.vscode/settings.json b/.vscode/settings.json index 05944fcdb..a0bf1d3b9 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,4 +1,16 @@ { + "python.languageServer": "Pylance", + "python.missingPackage.severity": "Warning", + "python.terminal.activateEnvInCurrentTerminal": true, + "python.terminal.focusAfterLaunch": true, + "python.testing.promptToConfigure": false, + "python.analysis.autoFormatStrings": true, + "python.analysis.autoImportCompletions": true, + "python.analysis.completeFunctionParens": true, + "python.analysis.exclude": ["**/__pycache__"], + "python.analysis.ignore": ["**/__pycache__"], + "python.analysis.importFormat": "relative", + "python.analysis.logLevel": "Warning", "git.allowForcePush": true, "git.confirmForcePush": true, "git.useForcePushIfIncludes": true, From aacecef45933449ee78ab55c6bd28f40f51add19 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 03:35:53 +0000 Subject: [PATCH 09/41] add mount for tmp so it has more space --- .devcontainer/devcontainer.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 04b07e061..0f627bb5e 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -19,6 +19,11 @@ "source": "nf-scil-venv", "target": "/workspaces/nf-scil/.venv", "type": "volume" + }, + { + "source": "nf-scil-tmp", + "target": "/tmp", + "type": "volume" } ], "customizations": { From 7e36ec683f6bc6614acede355f2e5b8c96ca12b1 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 7 Dec 2023 20:09:23 +0000 Subject: [PATCH 10/41] Add logos and TOC --- README.md | 32 ++- docs/deps/gh-md-toc | 415 +++++++++++++++++++++++++++++ docs/images/nf-scil_logo_dark.png | 3 + docs/images/nf-scil_logo_dark.svg | 1 + docs/images/nf-scil_logo_light.png | 3 + docs/images/nf-scil_logo_light.svg | 1 + 6 files changed, 454 insertions(+), 1 deletion(-) create mode 100755 docs/deps/gh-md-toc create mode 100644 docs/images/nf-scil_logo_dark.png create mode 100644 docs/images/nf-scil_logo_dark.svg create mode 100644 docs/images/nf-scil_logo_light.png create mode 100644 docs/images/nf-scil_logo_light.svg diff --git a/README.md b/README.md index 483bf6934..e6df325bb 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,9 @@ -# `nf-scil` +

+  [nf-scil logo image (light and dark theme variants)]
Welcome to `nf-scil` ! A **Nextflow** modules and workflows repository for neuroimaging maintained by the [SCIL team](https://scil-documentation.readthedocs.io/en/latest/). The @@ -7,6 +12,31 @@ primary focus of the library is to provide pre-built processes and processing se technologies and made easily available to pipeline's developers through the `nf-core` framework. +* [Using modules from nf-scil](README.md#using-modules-from-nf-scil) +* [Developing in nf-scil](README.md#developing-in-nf-scil) + * [Dependencies](README.md#dependencies) + * [Developer installation](README.md#developer-installation) + * [Loading the project's environment](README.md#loading-the-projects-environment) + * [Working with VS Code](README.md#working-with-vs-code) + * [Manual configuration of the VS Code project :](README.md#manual-configuration-of-the-vs-code-project-) + * [Configuration via the devcontainer :](README.md#configuration-via-the-devcontainer-) + * [Contributing to nf-scil](README.md#contributing-to-nf-scil) + * [Adding a new module to nf-scil](docs/MODULE.md#adding-a-new-module-to-nf-scil) + * [Generate the template](docs/MODULE.md#generate-the-template) + * [Edit the template](docs/MODULE.md#edit-the-template) + * [Editing the main](docs/MODULE.md#editing-modulesnf-scilcategorytoolmainnf-) + * [Editing the metadata](docs/MODULE.md#editing--modulesnf-scilcategorytoolmetayml-) + * [Editing the test cases](docs/MODULE.md#editing-testsmodulesnf-scilcategorytoolmainnf-) + * [Editing the tests configuration](docs/MODULE.md#editing-testsmodulesnf-scilcategorytoolnextflowconfig-) + * [Run the tests to generate the test metadata file](docs/MODULE.md#run-the-tests-to-generate-the-test-metadata-file) + * [Last safety test](docs/MODULE.md#last-safety-test) + * [Submit your PR](docs/MODULE.md#submit-your-pr) + * [Defining processes optional parameters](docs/MODULE.md#defining-processes-optional-parameters) + * [Test data infrastructure](docs/MODULE.md#test-data-infrastructure) + * [Using the .test_data directory](docs/MODULE.md#using-the-test_data-directory) + * [Using Scilpy Fetcher](docs/MODULE.md#using-scilpy-fetcher) + * [Running tests](README.md#running-tests) + # Using modules from `nf-scil` To import modules from `nf-scil`, you first need to install [nf-core](https://github.com/nf-core/tools) diff --git a/docs/deps/gh-md-toc b/docs/deps/gh-md-toc new file mode 100755 index 000000000..fbe76eeb9 --- /dev/null +++ b/docs/deps/gh-md-toc @@ -0,0 +1,415 @@ +#!/usr/bin/env bash + +# +# Steps: +# +# 1. Download corresponding html file for some README.md: +# curl -s $1 +# +# 2. Discard rows where no substring 'user-content-' (github's markup): +# awk '/user-content-/ { ... +# +# 3.1 Get last number in each row like ' ... sitemap.js.*<\/h/)+2, RLENGTH-5) +# +# 5. Find anchor and insert it inside "(...)": +# substr($0, match($0, "href=\"[^\"]+?\" ")+6, RLENGTH-8) +# + +gh_toc_version="0.9.1" + +gh_user_agent="gh-md-toc v$gh_toc_version" + +# +# Download rendered into html README.md by its url. +# +# +gh_toc_load() { + local gh_url=$1 + + if type curl &>/dev/null; then + curl --user-agent "$gh_user_agent" -s "$gh_url" + elif type wget &>/dev/null; then + wget --user-agent="$gh_user_agent" -qO- "$gh_url" + else + echo "Please, install 'curl' or 'wget' and try again." + exit 1 + fi +} + +# +# Converts local md file into html by GitHub +# +# -> curl -X POST --data '{"text": "Hello world github/linguist#1 **cool**, and #1!"}' https://api.github.com/markdown +#

+# Hello world github/linguist#1 cool, and #1!
'" +gh_toc_md2html() { + local gh_file_md=$1 + local skip_header=$2 + + URL=https://api.github.com/markdown/raw + + if [ ! -z "$GH_TOC_TOKEN" ]; then + TOKEN=$GH_TOC_TOKEN + else + TOKEN_FILE="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/token.txt" + if [ -f "$TOKEN_FILE" ]; then + TOKEN="$(cat $TOKEN_FILE)" + fi + fi + if [ ! -z "${TOKEN}" ]; then + AUTHORIZATION="Authorization: token ${TOKEN}" + fi + + local gh_tmp_file_md=$gh_file_md + if [ "$skip_header" = "yes" ]; then + if grep -Fxq "" "$gh_src"; then + # cut everything before the toc + gh_tmp_file_md=$gh_file_md~~ + sed '1,//d' $gh_file_md > $gh_tmp_file_md + fi + fi + + # echo $URL 1>&2 + OUTPUT=$(curl -s \ + --user-agent "$gh_user_agent" \ + --data-binary @"$gh_tmp_file_md" \ + -H "Content-Type:text/plain" \ + -H "$AUTHORIZATION" \ + "$URL") + + rm -f $gh_file_md~~ + + if [ "$?" != "0" ]; then + echo "XXNetworkErrorXX" + fi + if [ "$(echo "${OUTPUT}" | awk '/API rate limit exceeded/')" != "" ]; then + echo "XXRateLimitXX" + else + echo "${OUTPUT}" + fi +} + + +# +# Is passed string url +# +gh_is_url() { + case $1 in + https* | http*) + echo "yes";; + *) + echo "no";; + esac +} + +# +# TOC generator +# +gh_toc(){ + local gh_src=$1 + local gh_src_copy=$1 + local gh_ttl_docs=$2 + local need_replace=$3 + local no_backup=$4 + local no_footer=$5 + local indent=$6 + local skip_header=$7 + + if [ "$gh_src" = "" ]; then + echo "Please, enter URL or local path for a README.md" + exit 1 + fi + + + # Show "TOC" string only if working with one document + if [ "$gh_ttl_docs" = "1" ]; then + + echo "Table of Contents" + echo "=================" + echo "" + gh_src_copy="" + + fi + + if [ "$(gh_is_url "$gh_src")" == "yes" ]; then + gh_toc_load "$gh_src" | gh_toc_grab "$gh_src_copy" "$indent" + if [ "${PIPESTATUS[0]}" != "0" ]; then + echo "Could not load remote document." + echo "Please check your url or network connectivity" + exit 1 + fi + if [ "$need_replace" = "yes" ]; then + echo + echo "!! '$gh_src' is not a local file" + echo "!! Can't insert the TOC into it." + echo + fi + else + local rawhtml=$(gh_toc_md2html "$gh_src" "$skip_header") + if [ "$rawhtml" == "XXNetworkErrorXX" ]; then + echo "Parsing local markdown file requires access to github API" + echo "Please make sure curl is installed and check your network connectivity" + exit 1 + fi + if [ "$rawhtml" == "XXRateLimitXX" ]; then + echo "Parsing local markdown file requires access to github API" + echo "Error: You exceeded the hourly limit. See: https://developer.github.com/v3/#rate-limiting" + TOKEN_FILE="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/token.txt" + echo "or place GitHub auth token here: ${TOKEN_FILE}" + exit 1 + fi + local toc=`echo "$rawhtml" | gh_toc_grab "$gh_src_copy" "$indent"` + echo "$toc" + if [ "$need_replace" = "yes" ]; then + if grep -Fxq "" "$gh_src" && grep -Fxq "" "$gh_src"; then + echo "Found markers" + else + echo "You don't have or in your file...exiting" + exit 1 + fi + local ts="<\!--ts-->" + local te="<\!--te-->" + local dt=`date +'%F_%H%M%S'` + local ext=".orig.${dt}" + local toc_path="${gh_src}.toc.${dt}" + local toc_createdby="" + local toc_footer="" + # http://fahdshariff.blogspot.ru/2012/12/sed-mutli-line-replacement-between-two.html + # clear old TOC + sed -i${ext} "/${ts}/,/${te}/{//!d;}" "$gh_src" + # create toc file + echo "${toc}" > "${toc_path}" + if [ "${no_footer}" != "yes" ]; then + echo -e "\n${toc_createdby}\n${toc_footer}\n" >> "$toc_path" + fi + + # insert toc file + if ! 
sed --version > /dev/null 2>&1; then + sed -i "" "/${ts}/r ${toc_path}" "$gh_src" + else + sed -i "/${ts}/r ${toc_path}" "$gh_src" + fi + echo + if [ "${no_backup}" = "yes" ]; then + rm "$toc_path" "$gh_src$ext" + fi + echo "!! TOC was added into: '$gh_src'" + if [ -z "${no_backup}" ]; then + echo "!! Origin version of the file: '${gh_src}${ext}'" + echo "!! TOC added into a separate file: '${toc_path}'" + fi + echo + fi + fi +} + +# +# Grabber of the TOC from rendered html +# +# $1 - a source url of document. +# It's need if TOC is generated for multiple documents. +# $2 - number of spaces used to indent. +# +gh_toc_grab() { + + href_regex="/href=\"[^\"]+?\"/" + common_awk_script=' + modified_href = "" + split(href, chars, "") + for (i=1;i <= length(href); i++) { + c = chars[i] + res = "" + if (c == "+") { + res = " " + } else { + if (c == "%") { + res = "\\x" + } else { + res = c "" + } + } + modified_href = modified_href res + } + print sprintf("%*s", (level-1)*'"$2"', "") "* [" text "](" gh_url modified_href ")" + ' + if [ `uname -s` == "OS/390" ]; then + grepcmd="pcregrep -o" + echoargs="" + awkscript='{ + level = substr($0, 3, 1) + text = substr($0, match($0, /<\/span><\/a>[^<]*<\/h/)+11, RLENGTH-14) + href = substr($0, match($0, '$href_regex')+6, RLENGTH-7) + '"$common_awk_script"' + }' + else + grepcmd="grep -Eo" + echoargs="-e" + awkscript='{ + level = substr($0, 3, 1) + text = substr($0, match($0, /<\/span><\/a>.*<\/h/)+11, RLENGTH-14) + href = substr($0, match($0, '$href_regex')+6, RLENGTH-7) + '"$common_awk_script"' + }' + fi + + # if closed is on the new line, then move it on the prev line + # for example: + # was: The command foo1 + # + # became: The command foo1 + sed -e ':a' -e 'N' -e '$!ba' -e 's/\n<\/h/<\/h/g' | + + # Sometimes a line can start with . Fix that. + sed -e ':a' -e 'N' -e '$!ba' -e 's/\n//g' | sed 's/<\/code>//g' | + + # remove g-emoji + sed 's/]*[^<]*<\/g-emoji> //g' | + + # now all rows are like: + #

/dev/null`; then + echo `$tool --version | head -n 1` + else + echo "not installed" + fi + done +} + +show_help() { + local app_name=$(basename "$0") + echo "GitHub TOC generator ($app_name): $gh_toc_version" + echo "" + echo "Usage:" + echo " $app_name [options] src [src] Create TOC for a README file (url or local path)" + echo " $app_name - Create TOC for markdown from STDIN" + echo " $app_name --help Show help" + echo " $app_name --version Show version" + echo "" + echo "Options:" + echo " --indent Set indent size. Default: 3." + echo " --insert Insert new TOC into original file. For local files only. Default: false." + echo " See https://github.com/ekalinin/github-markdown-toc/issues/41 for details." + echo " --no-backup Remove backup file. Set --insert as well. Default: false." + echo " --hide-footer Do not write date & author of the last TOC update. Set --insert as well. Default: false." + echo " --skip-header Hide entry of the topmost headlines. Default: false." + echo " See https://github.com/ekalinin/github-markdown-toc/issues/125 for details." + echo "" +} + +# +# Options handlers +# +gh_toc_app() { + local need_replace="no" + local indent=3 + + if [ "$1" = '--help' ] || [ $# -eq 0 ] ; then + show_help + return + fi + + if [ "$1" = '--version' ]; then + show_version + return + fi + + if [ "$1" = '--indent' ]; then + indent="$2" + shift 2 + fi + + if [ "$1" = "-" ]; then + if [ -z "$TMPDIR" ]; then + TMPDIR="/tmp" + elif [ -n "$TMPDIR" -a ! -d "$TMPDIR" ]; then + mkdir -p "$TMPDIR" + fi + local gh_tmp_md + if [ `uname -s` == "OS/390" ]; then + local timestamp=$(date +%m%d%Y%H%M%S) + gh_tmp_md="$TMPDIR/tmp.$timestamp" + else + gh_tmp_md=$(mktemp $TMPDIR/tmp.XXXXXX) + fi + while read input; do + echo "$input" >> "$gh_tmp_md" + done + gh_toc_md2html "$gh_tmp_md" | gh_toc_grab "" "$indent" + return + fi + + if [ "$1" = '--insert' ]; then + need_replace="yes" + shift + fi + + if [ "$1" = '--no-backup' ]; then + need_replace="yes" + no_backup="yes" + shift + fi + + if [ "$1" = '--hide-footer' ]; then + need_replace="yes" + no_footer="yes" + shift + fi + + if [ "$1" = '--skip-header' ]; then + skip_header="yes" + shift + fi + + + for md in "$@" + do + echo "" + gh_toc "$md" "$#" "$need_replace" "$no_backup" "$no_footer" "$indent" "$skip_header" + done + + echo "" + echo "" +} + +# +# Entry point +# +gh_toc_app "$@" diff --git a/docs/images/nf-scil_logo_dark.png b/docs/images/nf-scil_logo_dark.png new file mode 100644 index 000000000..f285ee7bd --- /dev/null +++ b/docs/images/nf-scil_logo_dark.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:724780909e5e6cf4aca0694dabd6b5d7008108d51918f47ca1a70ab1e015e9a2 +size 139936 diff --git a/docs/images/nf-scil_logo_dark.svg b/docs/images/nf-scil_logo_dark.svg new file mode 100644 index 000000000..d52f4d765 --- /dev/null +++ b/docs/images/nf-scil_logo_dark.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/nf-scil_logo_light.png b/docs/images/nf-scil_logo_light.png new file mode 100644 index 000000000..671fdd5f6 --- /dev/null +++ b/docs/images/nf-scil_logo_light.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:165fe77b3c840f0e0c60282308f8dedfa49c24251d8aad03ce712637e090617b +size 139225 diff --git a/docs/images/nf-scil_logo_light.svg b/docs/images/nf-scil_logo_light.svg new file mode 100644 index 000000000..639deec45 --- /dev/null +++ b/docs/images/nf-scil_logo_light.svg @@ -0,0 +1 @@ + \ No newline at end of file From 410240366fd62430fcd1ccb66a7d1807b38306c5 Mon Sep 17 
00:00:00 2001 From: AlexVCaron Date: Thu, 7 Dec 2023 20:46:17 +0000 Subject: [PATCH 11/41] Add initial version of workflows ported from nf-core to do auto linting and testing --- .github/workflows/code_linting.yml | 2 +- .github/workflows/test.yml | 257 +++++++++++++++++++++++++++++ README.md | 7 + 3 files changed, 265 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/code_linting.yml b/.github/workflows/code_linting.yml index ad80f9f83..9c30b4a77 100644 --- a/.github/workflows/code_linting.yml +++ b/.github/workflows/code_linting.yml @@ -67,4 +67,4 @@ jobs: git add . git status git commit -m "[automated] Fix linting with Prettier" - git push \ No newline at end of file + git push diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..9e25fc109 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,257 @@ +name: Run tests +on: + push: + branches: [master] + pull_request: + branches: [master] + merge_group: + types: [checks_requested] + branches: [master] + +# Cancel if a newer run is started +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + NXF_SINGULARITY_CACHEDIR: ${{ github.workspace }}/.singularity + NXF_SINGULARITY_LIBRARYDIR: ${{ github.workspace }}/.singularity + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.0 + # FIXME Flip this off once we get to less than a couple hundred. Adding + # this so it will only run against changed files. It'll make it much + # easier to fix these as they come up rather than everything at once. + with: + extra_args: "" + + prettier: + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Install NodeJS + uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Install Prettier + run: npm install -g prettier + + - name: Run Prettier --check + run: prettier --check . + + editorconfig: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Install editorconfig-checker + run: npm install -g editorconfig-checker + + - name: Run ECLint check + run: editorconfig-checker -exclude README.md $(git ls-files | grep -v test) + + pytest-changes: + name: pytest-changes + runs-on: ubuntu-latest + outputs: + # Expose matched filters as job 'modules' output variable + modules: ${{ steps.filter.outputs.changes }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 2 # To retrieve the preceding commit. + + # TODO: change back to using dorny/paths-filter when https://github.com/dorny/paths-filter/pull/133 is implemented + - uses: mirpedrol/paths-filter@main + id: filter + with: + filters: "tests/config/pytest_modules.yml" + token: "" + + nf-test-changes: + name: nf-test-changes + runs-on: ubuntu-latest + outputs: + # Expose matched filters as job 'modules' output variable + modules: ${{ steps.filter.outputs.changes }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 2 # To retrieve the preceding commit. + + - name: Combine all tags.yml files + id: get_tags + run: find . 
-name "tags.yml" -not -path "./.github/*" -exec cat {} + > .github/tags.yml + + - name: debug + run: cat .github/tags.yml + + # TODO: change back to using dorny/paths-filter when https://github.com/dorny/paths-filter/pull/133 is implemented + - uses: mirpedrol/paths-filter@main + id: filter + with: + filters: ".github/tags.yml" + token: "" + + nf-core-lint: + runs-on: ubuntu-latest + name: nf-core-lint + needs: [pytest-changes, nf-test-changes] + if: ${{ (needs.pytest-changes.outputs.modules != '[]') || (needs.nf-test-changes.outputs.modules != '[]') }} + strategy: + fail-fast: false + matrix: + tags: + [ + "${{ fromJson(needs.pytest-changes.outputs.modules) }}", + "${{ fromJson(needs.nf-test-changes.outputs.modules) }}", + ] + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.11" + + - uses: actions/cache@v3 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + - name: Install pip + run: python -m pip install --upgrade pip + + - uses: actions/setup-java@v3 + with: + distribution: "temurin" + java-version: "17" + - name: Setup Nextflow + uses: nf-core/setup-nextflow@v1 + + - name: Install nf-core tools development version + run: python -m pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@dev + + - name: Lint module ${{ matrix.tags }} + run: nf-core modules lint ${{ matrix.tags }} + if: ${{ !startsWith(matrix.tags, 'subworkflows/') }} + + - name: Remove substring from matrix.tags + id: remove_substring + run: echo subworkflow_names=$(echo "${{ matrix.tags }}" | sed 's/subworkflows\///g') >> $GITHUB_OUTPUT + + - name: Lint subworkflow ${{ matrix.tags }} + run: nf-core subworkflows lint ${{steps.remove_substring.outputs.subworkflow_names}} + if: ${{ startsWith(matrix.tags, 'subworkflows/') }} + + pytest: + runs-on: ubuntu-latest + name: pytest + needs: [pytest-changes] + if: needs.pytest-changes.outputs.modules != '[]' + strategy: + fail-fast: false + matrix: + tags: ["${{ fromJson(needs.pytest-changes.outputs.modules) }}"] + profile: ["docker", "singularity"] + exclude: + - tags: "nf-test" + env: + NXF_ANSI_LOG: false + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.11" + + - name: Install Python dependencies + run: python -m pip install --upgrade pip pytest-workflow cryptography + + - uses: actions/setup-java@v3 + with: + distribution: "temurin" + java-version: "17" + + - name: Setup Nextflow + uses: nf-core/setup-nextflow@v1 + + - name: Setup apptainer + if: matrix.profile == 'singularity' + uses: eWaterCycle/setup-apptainer@main + + - name: Set up Singularity + if: matrix.profile == 'singularity' + run: | + mkdir -p $NXF_SINGULARITY_CACHEDIR + mkdir -p $NXF_SINGULARITY_LIBRARYDIR + + + # Test the module + - name: Run pytest-workflow + # only use one thread for pytest-workflow to avoid race condition on conda cache. 
+ run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof --git-aware --color=yes + + - name: Output log on failure + if: failure() + run: | + sudo apt-get update > /dev/null + sudo apt-get install bat > /dev/null + batcat --decorations=always --color=always /home/runner/pytest_workflow_*/*/log.{out,err} + + - name: Setting global variables + uses: actions/github-script@v6 + id: parsed + with: + script: | + return '${{ matrix.tags }}'.toLowerCase().replaceAll(/\//g, '-').trim('-').trim('"') + result-encoding: string + + - name: Upload logs on failure + if: failure() + uses: actions/upload-artifact@v3 + with: + name: logs-${{ matrix.profile }}-${{ steps.parsed.outputs.result }} + path: | + /home/runner/pytest_workflow_*/*/.nextflow.log + /home/runner/pytest_workflow_*/*/log.out + /home/runner/pytest_workflow_*/*/log.err + /home/runner/pytest_workflow_*/*/work + !/home/runner/pytest_workflow_*/*/work/conda + !/home/runner/pytest_workflow_*/*/work/singularity + !${{ github.workspace }}/.singularity + + confirm-pass: + runs-on: ubuntu-latest + needs: [prettier, editorconfig, pytest-changes, nf-core-lint, pytest, nf-test-changes] + if: always() + steps: + - name: All tests ok + if: ${{ success() || !contains(needs.*.result, 'failure') }} + run: exit 0 + - name: One or more tests failed + if: ${{ contains(needs.*.result, 'failure') }} + run: exit 1 + + - name: debug-print + if: always() + run: | + echo "toJSON(needs) = ${{ toJSON(needs) }}" + echo "toJSON(needs.*.result) = ${{ toJSON(needs.*.result) }}" diff --git a/README.md b/README.md index e6df325bb..da5c89ba0 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,13 @@ Sublime's custom image

+[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) +[![Imports: nf-core](https://img.shields.io/badge/nf--core-nf?label=import&style=flat&labelColor=ef8336&color=24B064)](https://pycqa.github.io/nf-core/) +[![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) +[![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) +![Code Linting](https://github.com/scilus/nf-scil/workflows/Code%20Linting/badge.svg) +![Modules Tests](https://github.com/scilus/nf-scil/workflows/Test%20All%20Modules/badge.svg) + Welcome to `nf-scil` ! A **Nextflow** modules and workflows repository for neuroimaging maintained by the [SCIL team](https://scil-documentation.readthedocs.io/en/latest/). The primary focus of the library is to provide pre-built processes and processing sequences for From e087d9f95db449699d6768acedc394b536adc479 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 7 Dec 2023 20:55:08 +0000 Subject: [PATCH 12/41] fix to nf-core commands --- .github/workflows/test.yml | 43 +++++++------------------------------- 1 file changed, 8 insertions(+), 35 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9e25fc109..7ff87045b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -81,43 +81,17 @@ jobs: filters: "tests/config/pytest_modules.yml" token: "" - nf-test-changes: - name: nf-test-changes - runs-on: ubuntu-latest - outputs: - # Expose matched filters as job 'modules' output variable - modules: ${{ steps.filter.outputs.changes }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 2 # To retrieve the preceding commit. - - - name: Combine all tags.yml files - id: get_tags - run: find . 
-name "tags.yml" -not -path "./.github/*" -exec cat {} + > .github/tags.yml - - - name: debug - run: cat .github/tags.yml - - # TODO: change back to using dorny/paths-filter when https://github.com/dorny/paths-filter/pull/133 is implemented - - uses: mirpedrol/paths-filter@main - id: filter - with: - filters: ".github/tags.yml" - token: "" - nf-core-lint: runs-on: ubuntu-latest name: nf-core-lint - needs: [pytest-changes, nf-test-changes] - if: ${{ (needs.pytest-changes.outputs.modules != '[]') || (needs.nf-test-changes.outputs.modules != '[]') }} + needs: [pytest-changes] + if: ${{ (needs.pytest-changes.outputs.modules != '[]') }} strategy: fail-fast: false matrix: tags: [ - "${{ fromJson(needs.pytest-changes.outputs.modules) }}", - "${{ fromJson(needs.nf-test-changes.outputs.modules) }}", + "${{ fromJson(needs.pytest-changes.outputs.modules) }}" ] steps: - uses: actions/checkout@v4 @@ -144,11 +118,11 @@ jobs: - name: Setup Nextflow uses: nf-core/setup-nextflow@v1 - - name: Install nf-core tools development version - run: python -m pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@dev + - name: Install nf-core tools + run: python -m pip install --upgrade --force-reinstall nf-core - name: Lint module ${{ matrix.tags }} - run: nf-core modules lint ${{ matrix.tags }} + run: nf-core modules --git-remote https://github.com/scilus/nf-scil.git lint ${{ matrix.tags }} if: ${{ !startsWith(matrix.tags, 'subworkflows/') }} - name: Remove substring from matrix.tags @@ -156,7 +130,7 @@ jobs: run: echo subworkflow_names=$(echo "${{ matrix.tags }}" | sed 's/subworkflows\///g') >> $GITHUB_OUTPUT - name: Lint subworkflow ${{ matrix.tags }} - run: nf-core subworkflows lint ${{steps.remove_substring.outputs.subworkflow_names}} + run: nf-core subworkflows --git-remote https://github.com/scilus/nf-scil.git lint ${{steps.remove_substring.outputs.subworkflow_names}} if: ${{ startsWith(matrix.tags, 'subworkflows/') }} pytest: @@ -203,7 +177,6 @@ jobs: mkdir -p $NXF_SINGULARITY_CACHEDIR mkdir -p $NXF_SINGULARITY_LIBRARYDIR - # Test the module - name: Run pytest-workflow # only use one thread for pytest-workflow to avoid race condition on conda cache. 
@@ -240,7 +213,7 @@ jobs: confirm-pass: runs-on: ubuntu-latest - needs: [prettier, editorconfig, pytest-changes, nf-core-lint, pytest, nf-test-changes] + needs: [prettier, editorconfig, pytest-changes, nf-core-lint, pytest] if: always() steps: - name: All tests ok From f2f4dd6b9be4ecacab9455afb6e12e1a0f3a86d7 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Mon, 8 Jan 2024 17:38:02 +0000 Subject: [PATCH 13/41] fix workflows --- .github/workflows/test.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7ff87045b..9343d9a70 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,12 +1,12 @@ name: Run tests on: push: - branches: [master] + branches: [main] pull_request: - branches: [master] + branches: [main] merge_group: types: [checks_requested] - branches: [master] + branches: [main] # Cancel if a newer run is started concurrency: From 8c001424c0ada22ad7afe9c424cf3a0c2e02753e Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Mon, 8 Jan 2024 21:05:03 +0000 Subject: [PATCH 14/41] add missing files --- .pre-commit-config.yaml | 28 ++++++++++++++++++++++++++++ requirements.txt | 20 ++++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 .pre-commit-config.yaml create mode 100644 requirements.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..ad23a3c89 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +repos: + - repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + - repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v2.7.1" + hooks: + - id: prettier + - repo: https://github.com/asottile/pyupgrade + rev: v3.15.0 + hooks: + - id: pyupgrade + args: [--py38-plus] + - repo: https://github.com/pre-commit/mirrors-mypy + rev: "v1.7.1" # Use the sha / tag you want to point at + hooks: + - id: mypy + additional_dependencies: + - types-PyYAML + - types-requests + - types-jsonschema + - types-Markdown + - types-setuptools diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..add52f4bc --- /dev/null +++ b/requirements.txt @@ -0,0 +1,20 @@ +click +filetype +GitPython +jinja2 +jsonschema>=3.0 +markdown>=3.3 +packaging +pre-commit +prompt_toolkit>=3.0.3 +pytest>=7.0.0 +pytest-workflow>=1.6.0 +pyyaml +questionary>=1.8.0 +refgenie +requests +requests_cache +rich-click>=1.6.1 +rich>=13.3.1 +tabulate +pdiff From 8239658992dd7d7bfe0a84b8d6754d87f6d30978 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Mon, 8 Jan 2024 21:32:49 +0000 Subject: [PATCH 15/41] update config --- .prettierignore | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.prettierignore b/.prettierignore index 4cd77bb4e..701ff1e7b 100644 --- a/.prettierignore +++ b/.prettierignore @@ -3,5 +3,8 @@ adaptivecard.json slackreport.json docs/api/_build testing -nf_core/module-template/modules/meta.yml -nf_core/module-template/tests/test.yml +nf_core/module-template/meta.yml +nf_core/module-template/tests/tags.yml +nf_core/subworkflow-template/tests/tags.yml +.github +.devcontainer From 717c057d251a7aeb7573de86c04962227bb53580 Mon Sep 17 00:00:00 2001 From: nf-scil-bot Date: Mon, 8 Jan 2024 21:34:11 +0000 Subject: [PATCH 16/41] [automated] Fix linting with Prettier --- .nf-core.yml | 1 - README.md | 372 ++--- docs/MODULE.md | 187 ++- docs/SCILPY_DATA.md | 1322 ++++++++--------- docs/pull_request_template.md | 
6 +- modules/nf-scil/betcrop/fslbetcrop/meta.yml | 2 +- modules/nf-scil/denoising/mppca/meta.yml | 2 +- modules/nf-scil/preproc/gibbs/meta.yml | 2 +- modules/nf-scil/preproc/n4/meta.yml | 4 +- modules/nf-scil/reconst/dtimetrics/meta.yml | 14 +- modules/nf-scil/reconst/frf/meta.yml | 2 +- modules/nf-scil/reconst/meanfrf/meta.yml | 4 +- modules/nf-scil/reconst/noddi/meta.yml | 2 +- modules/nf-scil/segmentation/fastseg/meta.yml | 2 +- .../segmentation/freesurferseg/meta.yml | 8 +- modules/nf-scil/testdata/scilpy/meta.yml | 6 +- modules/nf-scil/tracking/pfttracking/meta.yml | 6 +- modules/nf-scil/utils/extractb0/meta.yml | 2 +- nf-scil-extensions/.vscode/launch.json | 8 +- nf-scil-extensions/package.json | 98 +- .../vsc-extension-quickstart.md | 16 +- .../nf-scil/tracking/localtracking/test.yml | 4 +- 22 files changed, 1032 insertions(+), 1038 deletions(-) diff --git a/.nf-core.yml b/.nf-core.yml index 0824add44..86885b8d6 100644 --- a/.nf-core.yml +++ b/.nf-core.yml @@ -1,4 +1,3 @@ --- - repository_type: modules org_path: nf-scil diff --git a/README.md b/README.md index da5c89ba0..2acea97bb 100644 --- a/README.md +++ b/README.md @@ -1,186 +1,186 @@ -

-  [nf-scil logo image (light and dark theme variants)]
- -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) -[![Imports: nf-core](https://img.shields.io/badge/nf--core-nf?label=import&style=flat&labelColor=ef8336&color=24B064)](https://pycqa.github.io/nf-core/) -[![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) -[![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) -![Code Linting](https://github.com/scilus/nf-scil/workflows/Code%20Linting/badge.svg) -![Modules Tests](https://github.com/scilus/nf-scil/workflows/Test%20All%20Modules/badge.svg) - -Welcome to `nf-scil` ! A **Nextflow** modules and workflows repository for neuroimaging -maintained by the [SCIL team](https://scil-documentation.readthedocs.io/en/latest/). The -primary focus of the library is to provide pre-built processes and processing sequences for -**diffusion Magnetic Resonance Imaging**, optimized for *Nextflow DLS2*, based on open-source -technologies and made easily available to pipeline's developers through the `nf-core` -framework. - -* [Using modules from nf-scil](README.md#using-modules-from-nf-scil) -* [Developing in nf-scil](README.md#developing-in-nf-scil) - * [Dependencies](README.md#dependencies) - * [Developer installation](README.md#developer-installation) - * [Loading the project's environment](README.md#loading-the-projects-environment) - * [Working with VS Code](README.md#working-with-vs-code) - * [Manual configuration of the VS Code project :](README.md#manual-configuration-of-the-vs-code-project-) - * [Configuration via the devcontainer :](README.md#configuration-via-the-devcontainer-) - * [Contributing to nf-scil](README.md#contributing-to-nf-scil) - * [Adding a new module to nf-scil](docs/MODULE.md#adding-a-new-module-to-nf-scil) - * [Generate the template](docs/MODULE.md#generate-the-template) - * [Edit the template](docs/MODULE.md#edit-the-template) - * [Editing the main](docs/MODULE.md#editing-modulesnf-scilcategorytoolmainnf-) - * [Editing the metadata](docs/MODULE.md#editing--modulesnf-scilcategorytoolmetayml-) - * [Editing the test cases](docs/MODULE.md#editing-testsmodulesnf-scilcategorytoolmainnf-) - * [Editing the tests configuration](docs/MODULE.md#editing-testsmodulesnf-scilcategorytoolnextflowconfig-) - * [Run the tests to generate the test metadata file](docs/MODULE.md#run-the-tests-to-generate-the-test-metadata-file) - * [Last safety test](docs/MODULE.md#last-safety-test) - * [Submit your PR](docs/MODULE.md#submit-your-pr) - * [Defining processes optional parameters](docs/MODULE.md#defining-processes-optional-parameters) - * [Test data infrastructure](docs/MODULE.md#test-data-infrastructure) - * [Using the .test_data directory](docs/MODULE.md#using-the-test_data-directory) - * [Using Scilpy Fetcher](docs/MODULE.md#using-scilpy-fetcher) - * [Running tests](README.md#running-tests) - -# Using modules from `nf-scil` - -To import modules from `nf-scil`, you first need to install [nf-core](https://github.com/nf-core/tools) -on your system (can be done simply using `pip install nf-core`). Once done, `nf-scil` -modules are imported using this command : - -```bash -nf-core modules \ - --git-remote https://github.com/scilus/nf-scil.git \ - install / -``` - -where you input the `` you want to import from the desired ``. 
To get -a list of the available modules, run : - -```bash -nf-core modules \ - --git-remote https://github.com/scilus/nf-scil.git \ - list remote -``` - -# Developing in `nf-scil` - -## Dependencies - -- Python ≥ 3.8, < 3.13 -- Java Runtime ≥ 11, ≤ 17 - - On Ubuntu, install `openjdk-jre-` packages -- Nextflow ≥ 21.04.3 - -> [!IMPORTANT] -> Nextflow might not detect the right `Java virtual machine` by default, more so if -> multiple versions of the runtime are installed. If so, you need to set the environment -> variable `JAVA_HOME` to target the right one. -> -> - Linux : look in `/usr/lib/jvm` for -> a folder named `java--openjdk-` and use it as `JAVA_HOME`. -> -> - MacOS : if the `Java jvm` is the preferential one, use `JAVA_HOME=$(/usr/libexec/java_home)`. -> Else, look into `/Library/Java/JavaVirtualMachines` for the folder with the correct -> runtime version (named `jdk_1.jdk`) and use the -> following : `/Library/Java/JavaVirtualMachines/dk_1.jdk/Contents/Home`. - -## Developer installation - -The project uses *poetry* to manage python dependencies. To install it using pipx, -run the following commands : - -``` -pip install pipx -pipx ensurepath -pipx install poetry -``` - -> [!NOTE] -> If the second command above fails, `pipx` cannot be found in the path. Prepend the - second command with `$(which python) -m` and rerun the whole block. - -> [!WARNING] -> Poetry doesn't like when other python environments are activated around it. Make - sure to deactivate any before calling `poetry` commands. - -Once done, install the project with : - -``` -poetry install -``` - -## Loading the project's environment - -> [!IMPORTANT] -> Make sure no python environment is activated before running commands ! - -The project scripts and dependencies can be accessed using : - -``` -poetry shell -``` - -which will activate the project's python environment in the current shell. - -> [!NOTE] -> You will know the poetry environment is activated by looking at your shell. The - input line should be prefixed by : `(nf-scil-tools-py)`, with `` - being the actual Python version used in the environment. - -To exit the environment, simply enter the `exit` command in the shell. - -> [!IMPORTANT] -> Do not use traditional deactivation (calling `deactivate`), since it does not relinquish - the environment gracefully, making it so you won't be able to reactivate it without - exiting the shell. - -## Working with VS Code - -### Manual configuration of the VS Code project : - -First install the `nf-scil-extensions` package. You can find it easily on the [extension -marketplace](https://marketplace.visualstudio.com/items?itemName=AlexVCaron.nf-scil-extensions). - -### Configuration via the `devcontainer` : - -The `devcontainer` definition for the project contains all required dependencies and setup -steps are automatically executed. Open the cloned repository in *VS Code* and click on the -arrow box in the lower left corner, to get a prompt to `Reopen in container`. The procedure -will start a docker build, wait for a few minutes and enjoy your fully configured development -environment. 
- -- Available in the container : - - `nf-scil`, `nf-core` all accessible through the terminal, which is configured to load - the `poetry` environment in shells automatically - - `git`, `git-lfs`, `github-cli` - - `curl`, `wget`, `apt-get` - - `nextflow`, `docker`, `tmux` - -- Available in the VS Code IDE through extensions : - - Docker images and containers management - - Python and C++ linting, building and debugging tools - - Github Pull Requests management - - Github flavored markdown previewing - -## Contributing to `nf-scil` - -If you want to propose a new `module` to the repository, follow the guidelines in the -[module creation](./docs/MODULE.md) documentation. we follow standards closely -aligned with `nf-core`, with some exceptions on process atomicity and how test data is -handled. Modules that don't abide to them won't be accepted and PR containing them will -be closed automatically. - -## Running tests - -Tests are run through `nf-core`, using the command : - -```bash -nf-core modules \ - --git-remote https://github.com/scilus/nf-scil.git \ - test -``` - -The tool can be omitted to run tests for all modules in a category. +

+  [nf-scil logo image (light and dark theme variants)]
+ +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) +[![Imports: nf-core](https://img.shields.io/badge/nf--core-nf?label=import&style=flat&labelColor=ef8336&color=24B064)](https://pycqa.github.io/nf-core/) +[![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) +[![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) +![Code Linting](https://github.com/scilus/nf-scil/workflows/Code%20Linting/badge.svg) +![Modules Tests](https://github.com/scilus/nf-scil/workflows/Test%20All%20Modules/badge.svg) + +Welcome to `nf-scil` ! A **Nextflow** modules and workflows repository for neuroimaging +maintained by the [SCIL team](https://scil-documentation.readthedocs.io/en/latest/). The +primary focus of the library is to provide pre-built processes and processing sequences for +**diffusion Magnetic Resonance Imaging**, optimized for _Nextflow DLS2_, based on open-source +technologies and made easily available to pipeline's developers through the `nf-core` +framework. + +- [Using modules from nf-scil](README.md#using-modules-from-nf-scil) +- [Developing in nf-scil](README.md#developing-in-nf-scil) + - [Dependencies](README.md#dependencies) + - [Developer installation](README.md#developer-installation) + - [Loading the project's environment](README.md#loading-the-projects-environment) + - [Working with VS Code](README.md#working-with-vs-code) + - [Manual configuration of the VS Code project :](README.md#manual-configuration-of-the-vs-code-project-) + - [Configuration via the devcontainer :](README.md#configuration-via-the-devcontainer-) + - [Contributing to nf-scil](README.md#contributing-to-nf-scil) + - [Adding a new module to nf-scil](docs/MODULE.md#adding-a-new-module-to-nf-scil) + - [Generate the template](docs/MODULE.md#generate-the-template) + - [Edit the template](docs/MODULE.md#edit-the-template) + - [Editing the main](docs/MODULE.md#editing-modulesnf-scilcategorytoolmainnf-) + - [Editing the metadata](docs/MODULE.md#editing--modulesnf-scilcategorytoolmetayml-) + - [Editing the test cases](docs/MODULE.md#editing-testsmodulesnf-scilcategorytoolmainnf-) + - [Editing the tests configuration](docs/MODULE.md#editing-testsmodulesnf-scilcategorytoolnextflowconfig-) + - [Run the tests to generate the test metadata file](docs/MODULE.md#run-the-tests-to-generate-the-test-metadata-file) + - [Last safety test](docs/MODULE.md#last-safety-test) + - [Submit your PR](docs/MODULE.md#submit-your-pr) + - [Defining processes optional parameters](docs/MODULE.md#defining-processes-optional-parameters) + - [Test data infrastructure](docs/MODULE.md#test-data-infrastructure) + - [Using the .test_data directory](docs/MODULE.md#using-the-test_data-directory) + - [Using Scilpy Fetcher](docs/MODULE.md#using-scilpy-fetcher) + - [Running tests](README.md#running-tests) + +# Using modules from `nf-scil` + +To import modules from `nf-scil`, you first need to install [nf-core](https://github.com/nf-core/tools) +on your system (can be done simply using `pip install nf-core`). Once done, `nf-scil` +modules are imported using this command : + +```bash +nf-core modules \ + --git-remote https://github.com/scilus/nf-scil.git \ + install / +``` + +where you input the `` you want to import from the desired ``. 
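For illustration only, here is a minimal sketch of how an installed module could then be wired into a `DSL2` pipeline. The process name `BETCROP_FSLBETCROP`, the install path and the input shape are assumptions modelled on the `betcrop/fslbetcrop` module referenced in this repository, not a verified signature; the exact `include` line to use is the one printed by the install command.

```groovy
// Hypothetical usage sketch: module name, path and inputs are assumptions, shown only
// to illustrate the include-and-call pattern for a module installed from nf-scil.
include { BETCROP_FSLBETCROP } from './modules/nf-scil/betcrop/fslbetcrop/main'

workflow {
    // Subject-scoped input: a meta map paired with the file(s) the process expects.
    ch_dwi = Channel.of( [ [ id:'sub-01' ], file('sub-01_dwi.nii.gz') ] )

    BETCROP_FSLBETCROP ( ch_dwi )

    // Results are reached through the named emits, e.g. BETCROP_FSLBETCROP.out.<name>
}
```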
To get +a list of the available modules, run : + +```bash +nf-core modules \ + --git-remote https://github.com/scilus/nf-scil.git \ + list remote +``` + +# Developing in `nf-scil` + +## Dependencies + +- Python ≥ 3.8, < 3.13 +- Java Runtime ≥ 11, ≤ 17 + - On Ubuntu, install `openjdk-jre-` packages +- Nextflow ≥ 21.04.3 + +> [!IMPORTANT] +> Nextflow might not detect the right `Java virtual machine` by default, more so if +> multiple versions of the runtime are installed. If so, you need to set the environment +> variable `JAVA_HOME` to target the right one. +> +> - Linux : look in `/usr/lib/jvm` for +> a folder named `java--openjdk-` and use it as `JAVA_HOME`. +> - MacOS : if the `Java jvm` is the preferential one, use `JAVA_HOME=$(/usr/libexec/java_home)`. +> Else, look into `/Library/Java/JavaVirtualMachines` for the folder with the correct +> runtime version (named `jdk_1.jdk`) and use the +> following : `/Library/Java/JavaVirtualMachines/dk_1.jdk/Contents/Home`. + +## Developer installation + +The project uses _poetry_ to manage python dependencies. To install it using pipx, +run the following commands : + +``` +pip install pipx +pipx ensurepath +pipx install poetry +``` + +> [!NOTE] +> If the second command above fails, `pipx` cannot be found in the path. Prepend the +> second command with `$(which python) -m` and rerun the whole block. + +> [!WARNING] +> Poetry doesn't like when other python environments are activated around it. Make +> sure to deactivate any before calling `poetry` commands. + +Once done, install the project with : + +``` +poetry install +``` + +## Loading the project's environment + +> [!IMPORTANT] +> Make sure no python environment is activated before running commands ! + +The project scripts and dependencies can be accessed using : + +``` +poetry shell +``` + +which will activate the project's python environment in the current shell. + +> [!NOTE] +> You will know the poetry environment is activated by looking at your shell. The +> input line should be prefixed by : `(nf-scil-tools-py)`, with `` +> being the actual Python version used in the environment. + +To exit the environment, simply enter the `exit` command in the shell. + +> [!IMPORTANT] +> Do not use traditional deactivation (calling `deactivate`), since it does not relinquish +> the environment gracefully, making it so you won't be able to reactivate it without +> exiting the shell. + +## Working with VS Code + +### Manual configuration of the VS Code project : + +First install the `nf-scil-extensions` package. You can find it easily on the [extension +marketplace](https://marketplace.visualstudio.com/items?itemName=AlexVCaron.nf-scil-extensions). + +### Configuration via the `devcontainer` : + +The `devcontainer` definition for the project contains all required dependencies and setup +steps are automatically executed. Open the cloned repository in _VS Code_ and click on the +arrow box in the lower left corner, to get a prompt to `Reopen in container`. The procedure +will start a docker build, wait for a few minutes and enjoy your fully configured development +environment. 
+ +- Available in the container : + + - `nf-scil`, `nf-core` all accessible through the terminal, which is configured to load + the `poetry` environment in shells automatically + - `git`, `git-lfs`, `github-cli` + - `curl`, `wget`, `apt-get` + - `nextflow`, `docker`, `tmux` + +- Available in the VS Code IDE through extensions : + - Docker images and containers management + - Python and C++ linting, building and debugging tools + - Github Pull Requests management + - Github flavored markdown previewing + +## Contributing to `nf-scil` + +If you want to propose a new `module` to the repository, follow the guidelines in the +[module creation](./docs/MODULE.md) documentation. we follow standards closely +aligned with `nf-core`, with some exceptions on process atomicity and how test data is +handled. Modules that don't abide to them won't be accepted and PR containing them will +be closed automatically. + +## Running tests + +Tests are run through `nf-core`, using the command : + +```bash +nf-core modules \ + --git-remote https://github.com/scilus/nf-scil.git \ + test +``` + +The tool can be omitted to run tests for all modules in a category. diff --git a/docs/MODULE.md b/docs/MODULE.md index caf6cad24..7f0976da3 100644 --- a/docs/MODULE.md +++ b/docs/MODULE.md @@ -30,58 +30,56 @@ nf-core modules create \ You will still have to interact with the **bioconda** prompt, still select `no`. > [!NOTE] -> Once used to the conventions, adding `--empty-template` to the command will disable - auto-generation of comments, examples and TODOs and can be a time-saver. +> Once used to the conventions, adding `--empty-template` to the command will disable +> auto-generation of comments, examples and TODOs and can be a time-saver. ## Edit the template -The template has to be edited in order to work with `nf-scil` and still be importable -through `nf-core`. Refer to the `betcrop/fslbetcrop` module for an example as it should +The template has to be edited in order to work with `nf-scil` and still be importable +through `nf-core`. Refer to the `betcrop/fslbetcrop` module for an example as it should already follow all guidelines. You will find related files in : - `modules/nf-scil/betcrop/fslbetcrop` - `tests/modules/nf-scil/betcrop/fslbetcrop` - - ### Editing `./modules/nf-scil///main.nf` : - Remove the line `conda "YOUR-TOOL-HERE"`. -- If the process uses the `scilus` container, use the following replacements, +- If the process uses the `scilus` container, use the following replacements, else remove the whole section. - `depot.galaxyproject.org...` ⟹ `scil.usherbrooke.ca/containers/scilus_1.6.0.sif` + `depot.galaxyproject.org...` ⟹ `scil.usherbrooke.ca/containers/scilus_1.6.0.sif` - `biocontainers/YOUR-TOOL-HERE` ⟹ `scilus/scilus:1.6.0` + `biocontainers/YOUR-TOOL-HERE` ⟹ `scilus/scilus:1.6.0` - Add your inputs in the `input:` section : > [!NOTE] - > Each line below `input:` defines an input channel for the process. A channel can - receive one (`val`, `path`, ...) or more (`tuple`) values per item. + > Each line below `input:` defines an input channel for the process. A channel can + > receive one (`val`, `path`, ...) or more (`tuple`) values per item. > [!IMPORTANT] - > When possible, add all optional input parameters (not data !) to `task.ext` instead of - listing them in the `input:` section (see [this section](#defining-processes-optional-parameters) - for more information). + > When possible, add all optional input parameters (not data !) 
to `task.ext` instead of + > listing them in the `input:` section (see [this section](#defining-processes-optional-parameters) + > for more information). - All inputs are assumed to be `required` by default. - If an input is scoped to a subject, the line MUST start with `tuple val(meta), `. - - An input `path` CAN be optional (though it is not officially supported). You simply - have to pass it an empty list `[]` for Nextflow to consider its value empty, but + - An input `path` CAN be optional (though it is not officially supported). You simply + have to pass it an empty list `[]` for Nextflow to consider its value empty, but correct. > [!IMPORTANT] - > If you decide an input `path` value is optional, add `/* optional, value = [] */` - aside the parameter (e.g. f1 is optional, so `path(f1) /* optional, value = [] */` - or even `tuple val(meta), path(f1) /* optional, value = [] */, path(...` are valid - syntaxes). This will make input lines long, but they will be detectable. When we - can define input tuples on multiple lines, we'll deal with this. + > If you decide an input `path` value is optional, add `/* optional, value = [] */` + > aside the parameter (e.g. f1 is optional, so `path(f1) /* optional, value = [] */` + > or even `tuple val(meta), path(f1) /* optional, value = [] */, path(...` are valid + > syntaxes). This will make input lines long, but they will be detectable. When we + > can define input tuples on multiple lines, we'll deal with this. - In the script section, before the script definition (in `""" """`), unpack the + In the script section, before the script definition (in `""" """`), unpack the optional argument into a `usable variable`. For a optional input `input1`, add : def optional_input1 = input1 ? "" : "" @@ -89,32 +87,32 @@ already follow all guidelines. You will find related files in : The variable `optional_input1` is the one to use in the script. > [!NOTE] - > At its most simple, a variable is `usable` if its conversion to a string is valid - in the script (e.g. : if a variable can be empty or null, then its conversion to an - empty string must be valid in the sense of the script for the variable to be considered - `usable`). + > At its most simple, a variable is `usable` if its conversion to a string is valid + > in the script (e.g. : if a variable can be empty or null, then its conversion to an + > empty string must be valid in the sense of the script for the variable to be considered + > `usable`). - Add all outputs in the `output` section : - - As for inputs, each line defines an output channel. If an output is scoped to a + - As for inputs, each line defines an output channel. If an output is scoped to a subject, the line MUST start with `tuple val(meta), `. - File extensions MUST ALWAYS be defined (e.g. `path("*.{nii,nii.gz}")`). > [!IMPORTANT] - > Each line MUST use `emit: ` to make its results available inside Nextflow using - a relevant `name`. Results are accessible using : `PROCESS_NAME.out.`. - + > Each line MUST use `emit: ` to make its results available inside Nextflow using + > a relevant `name`. Results are accessible using : `PROCESS_NAME.out.`. + > [!NOTE] > Optional outputs ARE possible, add `, optional: true` after the `emit: ` clause. - Fill the `script` section : - - Use the `prefix` variable to name the scoped output files. If needed, modify the + - Use the `prefix` variable to name the scoped output files. If needed, modify the variable definition in the groovy pre-script. 
- Define dependencies versions : - + In the versioning section at the bottom of the script : ```bash @@ -123,37 +121,38 @@ already follow all guidelines. You will find related files in : END_VERSIONS ``` + remove the lines in between the `cat` and the `END_VERSIONS` line. In it, add for each dependency a new line in the format : `: `. > [!NOTE] - > You can hard-bake the version as a number here, but if possible extract if from - the dependency dynamically. Refer to the `betcrop/fslbetcrop` module, in `main.nf` - for examples on how to extract the version number correctly. + > You can hard-bake the version as a number here, but if possible extract if from + > the dependency dynamically. Refer to the `betcrop/fslbetcrop` module, in `main.nf` + > for examples on how to extract the version number correctly. - Fill the `stub` section : Using the same conventions as for the `script` section, define a simple test stub : - - Call the helps of all scripts used, if possible. + - Call the helps of all scripts used, if possible. - - Call `touch ` to generate empty files for all required outputs. + - Call `touch ` to generate empty files for all required outputs. -### Editing `./modules/nf-scil///meta.yml` : +### Editing `./modules/nf-scil///meta.yml` : -Fill the sections you find relevant. There is a lot of metadata in this file, but we -don't need to specify them all. At least define the `keywords`, describe the process' +Fill the sections you find relevant. There is a lot of metadata in this file, but we +don't need to specify them all. At least define the `keywords`, describe the process' `inputs` and `outputs`, and add a `short documentation` for the tool(s) used in the process. > [!IMPORTANT] -> The `tool` documentation does not describe your module, but to the tools you use in - the module ! If you use scripts from `scilpy`, here you describe scilpy. If using - `ANTs`, describe ANts. Etcetera. +> The `tool` documentation does not describe your module, but to the tools you use in +> the module ! If you use scripts from `scilpy`, here you describe scilpy. If using +> `ANTs`, describe ANts. Etcetera. ### Editing `./tests/modules/nf-scil///main.nf` : -The module's test suite is a collection of workflows containing isolated test cases. You -can add as many more tests as your heart desire (not too much), in addition to the one +The module's test suite is a collection of workflows containing isolated test cases. You +can add as many more tests as your heart desire (not too much), in addition to the one provided. > [!IMPORTANT] @@ -161,32 +160,32 @@ provided. In any case, to get the test workflows working, do the following : -- Either modify the auto-generated `input` object to add your test data or replace it with - a *fetcher workflow*. You can do this at the end, when you have defined your test cases. - Refer to [this section](#test-data-infrastructure) to see which use case fits your tests +- Either modify the auto-generated `input` object to add your test data or replace it with + a _fetcher workflow_. You can do this at the end, when you have defined your test cases. + Refer to [this section](#test-data-infrastructure) to see which use case fits your tests better. ### Editing `./tests/modules/nf-scil///nextflow.config` : -You don't need to touch anything here, except if you have defined optional parameters -with `task.ext` and want to alter their values for some test cases. 
Refer to -[this section](#defining-processes-optional-parameters) to see how to scope those parameters +You don't need to touch anything here, except if you have defined optional parameters +with `task.ext` and want to alter their values for some test cases. Refer to +[this section](#defining-processes-optional-parameters) to see how to scope those parameters to specific tests using `selectors`. ## Run the tests to generate the test metadata file > [!WARNING] -> Verify you are located at the root of `nf-scil` (not inside modules) before +> Verify you are located at the root of `nf-scil` (not inside modules) before > running commands ! -Once the test data has been pushed to the desired location and been made available to the -test infrastructure using the relevant configurations, the test module has to be pre-tested -so output files that gets generated are checksum correctly. +Once the test data has been pushed to the desired location and been made available to the +test infrastructure using the relevant configurations, the test module has to be pre-tested +so output files that gets generated are checksum correctly. > [!IMPORTANT] -> The test infrastructure uses `pytest-workflow` to run the tests. It is `git-aware`, -> meaning that only files either `committed` or `staged` will be considered by -> the tests. To verify that your file will be loaded correctly, check that it is +> The test infrastructure uses `pytest-workflow` to run the tests. It is `git-aware`, +> meaning that only files either `committed` or `staged` will be considered by +> the tests. To verify that your file will be loaded correctly, check that it is > listed by `git ls-files`. Run : @@ -199,26 +198,26 @@ nf-core modules create-test-yml \ ``` -All the test case you defined will be run, watch out for errors ! Once everything runs -smoothly, look at the test metadata file produced : `tests/modules/nf-scil//test.yml` -and validate that ALL outputs produced by test cases have been caught. Their `md5sum` is +All the test case you defined will be run, watch out for errors ! Once everything runs +smoothly, look at the test metadata file produced : `tests/modules/nf-scil//test.yml` +and validate that ALL outputs produced by test cases have been caught. Their `md5sum` is critical to ensure future executions of your test produce valid outputs. ## Last safety test -You're mostly done ! If every tests passes, your module is ready ! Still, you have not tested -that `nf-core` is able to find and install your module in an actual pipeline. First, to test -this, your module must be pushed only to your repository, so ensure that. Next, you need to -either locate yourself in an already existing `DSL2` Nextflow pipeline, or create a `dummy` +You're mostly done ! If every tests passes, your module is ready ! Still, you have not tested +that `nf-core` is able to find and install your module in an actual pipeline. First, to test +this, your module must be pushed only to your repository, so ensure that. Next, you need to +either locate yourself in an already existing `DSL2` Nextflow pipeline, or create a `dummy` testing one. > [!NOTE] -> To be valid, your `DSL2` Nextflow pipeline must have a `modules/` directory, as well as a - `writable` or non-existent `.modules.yml` file. +> To be valid, your `DSL2` Nextflow pipeline must have a `modules/` directory, as well as a +> `writable` or non-existent `.modules.yml` file. 
> [!NOTE] -> A `dummy` pipeline is simply a directory containing an empty `modules/` directory and a - `main.nf` file with the following content : `workflow {}`. +> A `dummy` pipeline is simply a directory containing an empty `modules/` directory and a +> `main.nf` file with the following content : `workflow {}`. Run the following command, to try installing the module : @@ -229,52 +228,52 @@ nf-core module \ install / ``` -You'll get a message at the command line, indicating which `include` line to add to your -pipeline to use your module. If you do it, add the module to your pipeline, run it and +You'll get a message at the command line, indicating which `include` line to add to your +pipeline to use your module. If you do it, add the module to your pipeline, run it and validate it's working, and you're done ! > [!NOTE] -> If working with the `dummy`, don't bother running it, we say it's working, so you can - delete the test and submit the PR ! +> If working with the `dummy`, don't bother running it, we say it's working, so you can +> delete the test and submit the PR ! ## Submit your PR -Open a PR to the `nf-scil` repository master. We'll test everything, make sure it's +Open a PR to the `nf-scil` repository master. We'll test everything, make sure it's working and that code follows standards. > [!NOTE] > It's the perfect place to get new tools added to our containers, if need be ! -Once LGTM has been declared, wave to the maintainers and look at your hard work paying off. +Once LGTM has been declared, wave to the maintainers and look at your hard work paying off. PR merged ! # Defining processes optional parameters -Using the DLS2 module framework, we can define passage of optional parameters using a configuration -proprietary to the `process scope`, the `task.ext` mapping (or dictionary). In `nf-core`, the convention +Using the DLS2 module framework, we can define passage of optional parameters using a configuration +proprietary to the `process scope`, the `task.ext` mapping (or dictionary). In `nf-core`, the convention is to load `task.ext.args` with all optional parameters acceptable by the process. -This does not work perfectly for our use-cases, and instead, we use the whole `task.ext` as a +This does not work perfectly for our use-cases, and instead, we use the whole `task.ext` as a parameters map. To define an optional parameter `param1` through `task.ext`, add the following to the process script section, before the script definition (in `""" """`) : ```groovy -def args_for_cmd1 = task.ext.param1 ? " $task.ext.param1" +def args_for_cmd1 = task.ext.param1 ? " $task.ext.param1" : '' ``` -Then, use `args_for_cmd1` in the script. Defining the actual value for the parameters is done -by means of `.config` files, inside the `process` scope. A global affectation of the parameter +Then, use `args_for_cmd1` in the script. Defining the actual value for the parameters is done +by means of `.config` files, inside the `process` scope. A global affectation of the parameter is as simple as : -```groovy +```groovy process { task.ext.param1 = "" } ``` -Doing so will affect **ALL** processes. To scope to a specific process, use the +Doing so will affect **ALL** processes. 
To scope to a specific process, use the [process selectors](https://www.nextflow.io/docs/latest/config.html#process-selectors) (`withName:` or `withLabel:`) : @@ -286,7 +285,7 @@ process { } ``` -You can define the selector on multiple levels and use glob matching, making it so that +You can define the selector on multiple levels and use glob matching, making it so that it is possible to affect the processes inside a specific workflow as well : ```groovy @@ -309,20 +308,20 @@ process { # Test data infrastructure > [!WARNING] -> WORK IN PROGRESS, WILL CHANGE SOON-ISH. 2 temporary ways are available now and will be +> WORK IN PROGRESS, WILL CHANGE SOON-ISH. 2 temporary ways are available now and will be > deprecated when the **DVC** infrastructure is ready. ## Using the `.test_data` directory Some test datasets are available under the `.test_data` directory. You can use them as you wish, -but inspect them before you do, since some dataset have been lean down and could not fit the -reality of your test cases. **Do not add or modify data in this directory**. Tests packages are -separated into `heavy` and `light` categories depending on their filesize. Inside, they are divided +but inspect them before you do, since some dataset have been lean down and could not fit the +reality of your test cases. **Do not add or modify data in this directory**. Tests packages are +separated into `heavy` and `light` categories depending on their filesize. Inside, they are divided into relevant sub-categories (dwi, anat, ...). -To bind data to test cases using this infrastructure, it first has to be added to `tests/config/test_data.config` -in order to be visible. The configuration is a nesting of dictionaries, all test data -files must be added to the `params.test_data` of this structure, using this convention +To bind data to test cases using this infrastructure, it first has to be added to `tests/config/test_data.config` +in order to be visible. The configuration is a nesting of dictionaries, all test data +files must be added to the `params.test_data` of this structure, using this convention for the `dictionary key` : `params.test_data[][][]`. Thus, a new binding in `tests/config/test_data.config` should resemble the following @@ -351,7 +350,7 @@ params { ``` You then use `params.test_data[][][]` in your test cases to -attach the data to the test case, since the `params.test_data` collection is loaded +attach the data to the test case, since the `params.test_data` collection is loaded automatically. To do so, in a test workflow, define an `input` object : ``` @@ -371,7 +370,7 @@ and use it as input to the processes to test. ## Using Scilpy Fetcher The Scilpy Fetcher is a tool that allows you to download datasets from the Scilpy test data -depository. To use it, first include the *fetcher workflow* in your test's `main.nf` : +depository. To use it, first include the _fetcher workflow_ in your test's `main.nf` : ``` include { LOAD_TEST_DATA } from '../../../../../subworkflows/nf-scil/load_test_data/main' @@ -400,5 +399,5 @@ input = LOAD_TEST_DATA.out.test_data_directory > [!NOTE] > The subworkflow must be called individually in each test workflow, even if they download -> the same archives, since there is no mechanism to pass data channels to them from the +> the same archives, since there is no mechanism to pass data channels to them from the > outside. 
diff --git a/docs/SCILPY_DATA.md b/docs/SCILPY_DATA.md index d2b4dc5d0..0080c2ea3 100644 --- a/docs/SCILPY_DATA.md +++ b/docs/SCILPY_DATA.md @@ -3,664 +3,664 @@ Datasets available in Scilpy data. Entries are indexed by archive name. - - bids_json.zip - - result_real_dwi_real_rev_dwi_sbref.json - - result_real_dwi_real_rev_dwi.json - - result_complex_dwi_complex_rev_dwi_sbref.json - - result_real_dwi_epi.json - - result_complex_dwi_complex_rev_dwi.json - - result_complex_dwi_epi.json - - result_real_dwi_real_sbref.json - - result_complex_dwi_complex_sbref.json - - plot.zip - - atlas_brainnetome.nii.gz - - ad.nii.gz - - fa.nii.gz - - map_gm.nii.gz - - map_wm.nii.gz - - mask_gm.nii.gz - - mask_wm.nii.gz - - atlas_brainnetome.json - - ihMT.zip - - B1map.json - - B1map.nii.gz - - echo-1_acq-altnp_ihmt.json - - echo-1_acq-altnp_ihmt.nii.gz - - echo-1_acq-altpn_ihmt.json - - echo-1_acq-altpn_ihmt.nii.gz - - echo-1_acq-mtoff_ihmt.json - - echo-1_acq-mtoff_ihmt.nii.gz - - echo-1_acq-neg_ihmt.json - - echo-1_acq-neg_ihmt.nii.gz - - echo-1_acq-pos_ihmt.json - - echo-1_acq-pos_ihmt.nii.gz - - echo-1_acq-T1w_ihmt.json - - echo-1_acq-T1w_ihmt.nii.gz - - echo-2_acq-altnp_ihmt.json - - echo-2_acq-altnp_ihmt.nii.gz - - echo-2_acq-altpn_ihmt.json - - echo-2_acq-altpn_ihmt.nii.gz - - echo-2_acq-mtoff_ihmt.json - - echo-2_acq-mtoff_ihmt.nii.gz - - echo-2_acq-neg_ihmt.json - - echo-2_acq-neg_ihmt.nii.gz - - echo-2_acq-pos_ihmt.json - - echo-2_acq-pos_ihmt.nii.gz - - echo-2_acq-T1w_ihmt.json - - echo-2_acq-T1w_ihmt.nii.gz - - echo-3_acq-altnp_ihmt.json - - echo-3_acq-altnp_ihmt.nii.gz - - echo-3_acq-altpn_ihmt.json - - echo-3_acq-altpn_ihmt.nii.gz - - echo-3_acq-mtoff_ihmt.json - - echo-3_acq-mtoff_ihmt.nii.gz - - echo-3_acq-neg_ihmt.json - - echo-3_acq-neg_ihmt.nii.gz - - echo-3_acq-pos_ihmt.json - - echo-3_acq-pos_ihmt.nii.gz - - echo-3_acq-T1w_ihmt.json - - echo-3_acq-T1w_ihmt.nii.gz - - mask_resample.nii.gz - - MT.zip - - mask.nii.gz - - sub-001_echo-1_acq-mtoff_mtsat.json - - sub-001_echo-1_acq-mtoff_mtsat.nii.gz - - sub-001_echo-1_acq-mton_mtsat.json - - sub-001_echo-1_acq-mton_mtsat.nii.gz - - sub-001_echo-1_acq-t1w_mtsat.json - - sub-001_echo-1_acq-t1w_mtsat.nii.gz - - sub-001_echo-2_acq-mtoff_mtsat.json - - sub-001_echo-2_acq-mtoff_mtsat.nii.gz - - sub-001_echo-2_acq-mton_mtsat.json - - sub-001_echo-2_acq-mton_mtsat.nii.gz - - sub-001_echo-2_acq-t1w_mtsat.json - - sub-001_echo-2_acq-t1w_mtsat.nii.gz - - sub-001_echo-3_acq-mtoff_mtsat.json - - sub-001_echo-3_acq-mtoff_mtsat.nii.gz - - sub-001_echo-3_acq-mton_mtsat.json - - sub-001_echo-3_acq-mton_mtsat.nii.gz - - sub-001_echo-3_acq-t1w_mtsat.json - - sub-001_echo-3_acq-t1w_mtsat.nii.gz - - sub-001_echo-4_acq-mtoff_mtsat.json - - sub-001_echo-4_acq-mtoff_mtsat.nii.gz - - sub-001_echo-4_acq-mton_mtsat.json - - sub-001_echo-4_acq-mton_mtsat.nii.gz - - sub-001_echo-4_acq-t1w_mtsat.json - - sub-001_echo-4_acq-t1w_mtsat.nii.gz - - sub-001_echo-5_acq-mtoff_mtsat.json - - sub-001_echo-5_acq-mtoff_mtsat.nii.gz - - sub-001_echo-5_acq-mton_mtsat.json - - sub-001_echo-5_acq-mton_mtsat.nii.gz - - sub-001_echo-5_acq-t1w_mtsat.json - - sub-001_echo-5_acq-t1w_mtsat.nii.gz - - sub-001_run-01_B1map.json - - sub-001_run-01_B1map.nii.gz - - atlas.zip - - atlas_freesurfer_v2.nii.gz - - atlas_freesurfer_v2_LUT.json - - atlas_freesurfer_v2_labels_list.txt - - atlas_freesurfer_v2_no_brainstem.nii.gz - - atlas_freesurfer_v2_single_brainstem.nii.gz - - atlas_freesurfer_v2_single_brainstem_dil.nii.gz - - bash.sh - - brainstem.nii.gz - - brainstem_173.nii.gz - - 
brainstem_174.nii.gz - - brainstem_175.nii.gz - - brainstem_bin.nii.gz - - 10.nii.gz - - 1000.nii.gz - - 1001.nii.gz - - 1002.nii.gz - - 1003.nii.gz - - 1005.nii.gz - - 1006.nii.gz - - 1008.nii.gz - - 1009.nii.gz - - 1010.nii.gz - - 1011.nii.gz - - 1012.nii.gz - - 1013.nii.gz - - 1014.nii.gz - - 1015.nii.gz - - 1016.nii.gz - - 1017.nii.gz - - 1018.nii.gz - - 1019.nii.gz - - 1020.nii.gz - - 1021.nii.gz - - 1022.nii.gz - - 1023.nii.gz - - 1024.nii.gz - - 1025.nii.gz - - 1026.nii.gz - - 1027.nii.gz - - 1028.nii.gz - - 1029.nii.gz - - 1030.nii.gz - - 1031.nii.gz - - 1032.nii.gz - - 1033.nii.gz - - 1034.nii.gz - - 1035.nii.gz - - 11.nii.gz - - 12.nii.gz - - 13.nii.gz - - 17.nii.gz - - 173.nii.gz - - 174.nii.gz - - 175.nii.gz - - 18.nii.gz - - 2000.nii.gz - - 2001.nii.gz - - 2002.nii.gz - - 2003.nii.gz - - 2005.nii.gz - - 2006.nii.gz - - 2007.nii.gz - - 2008.nii.gz - - 2009.nii.gz - - 2010.nii.gz - - 2011.nii.gz - - 2012.nii.gz - - 2013.nii.gz - - 2014.nii.gz - - 2015.nii.gz - - 2016.nii.gz - - 2017.nii.gz - - 2018.nii.gz - - 2019.nii.gz - - 2020.nii.gz - - 2021.nii.gz - - 2022.nii.gz - - 2023.nii.gz - - 2024.nii.gz - - 2025.nii.gz - - 2026.nii.gz - - 2027.nii.gz - - 2028.nii.gz - - 2029.nii.gz - - 2030.nii.gz - - 2031.nii.gz - - 2032.nii.gz - - 2033.nii.gz - - 2034.nii.gz - - 2035.nii.gz - - 26.nii.gz - - 28.nii.gz - - 47.nii.gz - - 49.nii.gz - - 50.nii.gz - - 51.nii.gz - - 52.nii.gz - - 53.nii.gz - - 54.nii.gz - - 58.nii.gz - - 60.nii.gz - - 8.nii.gz - - 85.nii.gz - - 8_10.nii.gz - - left_accumbens_area.nii.gz - - left_amygdala.nii.gz - - left_banks_superior_temporal.nii.gz - - left_caudal_anterior_cingulate.nii.gz - - left_caudal_middle_fontral.nii.gz - - left_caudate.nii.gz - - left_cerebellum_cortex.nii.gz - - left_corpus_callosum.nii.gz - - left_cuneus.nii.gz - - left_entorhinal.nii.gz - - left_frontal_pole.nii.gz - - left_fusiform.nii.gz - - left_hippocampus.nii.gz - - left_inferior_parietal.nii.gz - - left_inferior_temporal.nii.gz - - left_insula.nii.gz - - left_isthmus_cingulate.nii.gz - - left_lateral_occipital.nii.gz - - left_laterral_orbitofrontal.nii.gz - - left_lingual.nii.gz - - left_medial_orbitofrontal.nii.gz - - left_middle_temporal.nii.gz - - left_pallidum.nii.gz - - left_paracentral.nii.gz - - left_parahipocampal.nii.gz - - left_parsopercularis.nii.gz - - left_parsorbitalis.nii.gz - - left_parstriangularis.nii.gz - - left_pericalcarine.nii.gz - - left_postcentral.nii.gz - - left_posteriorcingulate.nii.gz - - left_precentral.nii.gz - - left_precuneus.nii.gz - - left_putamen.nii.gz - - left_rostral_anterior_cingulate.nii.gz - - left_rostral_middle_frontal.nii.gz - - left_superior_frontral.nii.gz - - left_superior_parietal.nii.gz - - left_superior_temporal.nii.gz - - left_supra_marginal.nii.gz - - left_temporal_pole.nii.gz - - left_thalamus_proper.nii.gz - - left_transverse_temporal.nii.gz - - left_ventraldc.nii.gz - - midbrain.nii.gz - - oblongata.nii.gz - - pons.nii.gz - - right_accumbens_area.nii.gz - - right_amygdala.nii.gz - - right_banks_superior_temporal.nii.gz - - right_caudal_anterior_cingulate.nii.gz - - right_caudal_middle_fontral.nii.gz - - right_caudate.nii.gz - - right_cerebellum_cortex.nii.gz - - right_corpus_callosum.nii.gz - - right_cuneus.nii.gz - - right_entorhinal.nii.gz - - right_frontal_pole.nii.gz - - right_fusiform.nii.gz - - right_hippocampus.nii.gz - - right_inferior_parietal.nii.gz - - right_inferior_temporal.nii.gz - - right_insula.nii.gz - - right_isthmus_cingulate.nii.gz - - right_lateral_occipital.nii.gz - - right_laterral_orbitofrontal.nii.gz - - 
right_lingual.nii.gz - - right_medial_orbitofrontal.nii.gz - - right_middle_temporal.nii.gz - - right_pallidum.nii.gz - - right_paracentral.nii.gz - - right_parahipocampal.nii.gz - - right_parsopercularis.nii.gz - - right_parsorbitalis.nii.gz - - right_parstriangularis.nii.gz - - right_pericalcarine.nii.gz - - right_postcentral.nii.gz - - right_posteriorcingulate.nii.gz - - right_precentral.nii.gz - - right_precuneus.nii.gz - - right_putamen.nii.gz - - right_rostral_anterior_cingulate.nii.gz - - right_rostral_middle_frontal.nii.gz - - right_superior_frontral.nii.gz - - right_superior_parietal.nii.gz - - right_superior_temporal.nii.gz - - right_supra_marginal.nii.gz - - right_temporal_pole.nii.gz - - right_thalamus_proper.nii.gz - - right_transverse_temporal.nii.gz - - right_ventraldc.nii.gz - - bst.zip - - todi_mask.nii.gz - - template_lin.nii.gz - - rpt_m_warp.trk - - rpt_m_lin.trk - - output1InverseWarp.nii.gz - - output0GenericAffine.mat - - out_lw_tdi.nii.gz - - mask.nii.gz - - lw_todi_sh.nii.gz - - fodf.nii.gz - - fa.nii.gz - - bash.sh - - template0.nii.gz - - rpt_m.trk - - todi_mask.nii.gz - - priors.nii.gz - - endpoints_mask.nii.gz - - efod.nii.gz - - bundles.zip - - transformation_rigid.txt - - sagittal_glass.png - - sagittal_3d.png - - coronal_glass.png - - coronal_3d.png - - clusters.pkl - - bundle_all_1mm_ic.trk - - bundle_all_1mm.trk - - bundle_all_1mm.nii.gz - - bundle_0_similarity.json - - bundle_0_reco.tck - - bundle_0_measures.json - - bundle_0_binary.json - - bash.sh - - axial_glass.png - - axial_3d.png - - affine.txt - - results.json - - logfile.txt - - bundle_6.trk - - bundle_5.trk - - bundle_4.trk - - bundle_3.trk - - bundle_2.trk - - bundle_1.trk - - bundle_0.trk - - default_config_sim.json - - bundle_all_1mm.nii.gz - - bundle_6.trk - - bundle_5.trk - - bundle_4.trk - - bundle_3.trk - - bundle_2.trk - - bundle_1.trk - - bundle_0.trk - - bundle_6.trk - - bundle_5.trk - - bundle_4.trk - - bundle_3.trk - - bundle_2.trk - - bundle_1.trk - - bundle_0.trk - - bundle_6.trk - - bundle_5.trk - - bundle_4.trk - - bundle_3.trk - - bundle_2.trk - - bundle_1.trk - - bundle_0.trk - - commit_amico.zip - - cmd - - tracking.trk - - dwi.bvec - - dwi.bval - - wm_frf.txt - - gm_frf.txt - - csf_frf.txt - - peaks.nii.gz - - mask.nii.gz - - md.nii.gz - - fa.nii.gz - - dwi.nii.gz - - ad.nii.gz - - connectivity.zip - - vol.npy - - success.txt - - sim.npy - - sc_thr.npy - - sc_reo_fake.npy - - sc_normalize.npy - - sc_norm_vol.npy - - sc_norm.png - - sc_norm.npy - - sc_masked.npy - - sc_lower_threshold.npy - - sc_add_10.npy - - sc.npy - - reorder.txt - - pval.npy - - mask.npy - - len.npy - - labels_list.txt - - gtm.json - - fodf.nii.gz - - endpoints_atlas.nii.gz - - decompose_afd_rd.h5 - - decompose.h5 - - bundle_all_1mm.trk - - bash.sh - - affine.txt - - afd_max.npy - - afd_max.nii.gz - - 9_9.trk - - 5_6.trk - - 4_8.trk - - 3_7.trk - - 2_7.trk - - 1_7.trk - - 1_10.trk - - 9_9.nii.gz - - 5_6.nii.gz - - 4_8.nii.gz - - 3_7.nii.gz - - 2_7.nii.gz - - 1_7.nii.gz - - 1_10.nii.gz - - filtering.zip - - voting_voxels.nii.gz - - voting_streamlines.trk - - sc.bdo - - mosaic.png - - mask.nii.gz - - centroids.trk - - bundle_all_1mm_inliers.trk - - bundle_all_1mm.trk - - bundle_all_1mm.nii.gz - - bundle_4_filtered_no_loops.trk - - bundle_4_filtered.trk - - bundle_4_filtered.nii.gz - - bundle_4.trk - - bash.sh - - cluster_9.trk - - cluster_8.trk - - cluster_7.trk - - cluster_6.trk - - cluster_5.trk - - cluster_4.trk - - cluster_3.trk - - cluster_2.trk - - cluster_19.trk - - cluster_18.trk - - cluster_17.trk - 
- cluster_16.trk - - cluster_15.trk - - cluster_14.trk - - cluster_13.trk - - cluster_12.trk - - cluster_11.trk - - cluster_10.trk - - cluster_1.trk - - cluster_0.trk - - others.zip - - t1_reshape.nii.gz - - t1_resample.nii.gz - - t1_crop_denoised.nii.gz - - t1_crop.nii.gz - - t1.nii.gz - - rgb_.nii.gz - - rgb.nii.gz - - fibercup_bundles_color.trk - - fibercup_bundles.trk - - fibercup_bundle_0_color.trk - - fibercup_bundle_0.trk - - fa_resample.nii.gz - - fa.nii.gz - - encoding.b - - empty.trk - - density.nii.gz - - bash.sh - - atlas_freesurfer_v2_LUT.json - - atlas_freesurfer_v2.nii.gz - - IFGWM_sub.trk - - IFGWM.trk - - processing.zip - - fodf_descoteaux07_sub_full.nii.gz - - fodf_descoteaux07_sub.nii.gz - - fd.nii.gz - - fodf_bingham.nii.gz - - dwi_noise_mask.nii.gz - - 1000.b - - 1000.bval - - 1000.bvec - - 1000_flip.b - - 3000.bval - - 3000.bvec - - ad.nii.gz - - ad_ransanc.nii.gz - - afd_max.nii.gz - - afd_sum.nii.gz - - afd_test.nii.gz - - afd_tot.nii.gz - - b0_mean.nii.gz - - b0_mean_n4.nii.gz - - bias_field_b0.nii.gz - - diff.nii.gz - - dki_ad.nii.gz - - dki_fa.nii.gz - - dki_md.nii.gz - - dki_rd.nii.gz - - dwi.bval - - dwi.bvec - - dwi.nii.gz - - dwi_crop.nii.gz - - dwi_crop_1000.nii.gz - - dwi_crop_3000.nii.gz - - dwi_crop_n4.nii.gz - - fa.nii.gz - - fa_low.nii.gz - - fa_thr.nii.gz - - fodf.nii.gz - - fodf_descoteaux07.nii.gz - - frf.txt - - md.nii.gz - - mfrf.txt - - mni_masked_2x2x2.nii.gz - - nfrf.txt - - nufo.nii.gz - - peaks.nii.gz - - rd.nii.gz - - rd_test.nii.gz - - seed.nii.gz - - sh.nii.gz - - sh_1000.nii.gz - - sh_3000.nii.gz - - tracking.trk - - cc.nii.gz - - dwi_graph.png - - dwi_SNR.json - - small_roi_gm_mask.nii.gz - - axial.png - - coronal.png - - sagittal.png - - surface_vtk_fib.zip - - rhpialt.vtk - - log.txt - - lhpialt_smooth.vtk - - lhpialt_lin.vtk - - lhpialt.vtk - - lh.pialt_xform - - gyri_fanning_c.trk - - gyri_fanning.trk - - gyri_fanning.tck - - gyri_fanning.fib - - fa_flip.nii.gz - - fa.nii.gz - - bash.sh - - affine.txt - - tracking.zip - - interface.nii.gz - - bash.sh - - fa.nii.gz - - fodf.nii.gz - - local.trk - - local_split_0.trk - - local_split_1.trk - - local_split_2.trk - - map_csf.nii.gz - - map_exclude.nii.gz - - map_exclude_corr.nii.gz - - map_gm.nii.gz - - map_include.nii.gz - - map_include_corr.nii.gz - - map_wm.nii.gz - - peaks.nii.gz - - pft.trk - - seeding_mask.nii.gz - - union.trk - - union_shuffle.trk - - union_shuffle_sub.trk - - union_shuffle_sub_smooth.trk - - tractograms.zip - - bundle_4_cut_endpoints.tck - - bundle_4_cut_center.tck - - bundle_4_endpoints_5points.nii.gz - - bundle_4_center.nii.gz - - bundle_4_head_tail.nii.gz - - bundle_4_head_tail_offset.nii.gz - - bundle_4.tck - - bundle_4_endpoints_1point.nii.gz - - bundle_4_wm.nii.gz - - bundle_4_endpoints_1point.nii.gz - - bundle_4_cut_endpoints.tck - - bundle_4_center.nii.gz - - bundle_4_head_tail.nii.gz - - bundle_4_head_tail_offset.nii.gz - - bundle_4.tck - - bundle_4_wm.nii.gz - - bundle_4_cut_center.tck - - bundle_4_endpoints_5points.nii.gz - - tractometry.zip - - tail.nii.gz - - mni_masked.nii.gz - - metric_label.json - - length_stats_2.json - - length_stats_1.json - - length_stats.xlsx - - label.npz - - head.nii.gz - - distance.npz - - bash.sh - - IFGWM_uni_c_10.trk - - IFGWM_uni_c.trk - - IFGWM_uni.trk - - IFGWM_labels_map.nii.gz - - IFGWM_color.trk - - IFGWM.trk - - IFGWM.nii.gz - - IFGWM_mni_masked.png - - stats.zip - - participants.tsv - - meanstd_all.json - - sub-2120_rd.npy - - sub-2120_nufo.npy - - sub-2120_md.npy - - sub-2120_fa.npy - - sub-2120_afd_total.npy - - 
sub-2120_afd_fixel.npy - - sub-2120_ad.npy - - sub-1230_rd.npy - - sub-1230_nufo.npy - - sub-1230_md.npy - - sub-1230_fa.npy - - sub-1230_afd_total.npy - - sub-1230_afd_fixel.npy - - sub-1230_ad.npy - - sub-1108_rd.npy - - sub-1108_nufo.npy - - sub-1108_md.npy - - sub-1108_fa.npy - - sub-1108_afd_total.npy - - sub-1108_afd_fixel.npy - - sub-1108_ad.npy - - sub-1005_rd.npy - - sub-1005_nufo.npy - - sub-1005_md.npy - - sub-1005_fa.npy - - sub-1005_afd_total.npy - - sub-1005_afd_fixel.npy - - sub-1005_ad.npy - - list_id.txt - - anatomical_filtering.zip - - tractogram_filter_ana.trk - - wmparc_filter_ana.nii.gz - - btensor_testdata.zip - - wm_frf.txt - - spherical.bvecs - - spherical.bvals - - planar.bvecs - - planar.bvals - - md.nii.gz - - linear.bvecs - - linear.bvals - - gm_frf.txt - - fa.nii.gz - - dwi_spherical.nii.gz - - dwi_planar.nii.gz - - dwi_linear.nii.gz - - csf_frf.txt - - fodf_filtering.zip - - fodf_descoteaux07_sub_twice.nii.gz - - fodf_descoteaux07_sub_sym.nii.gz - - fodf_descoteaux07_sub_full.nii.gz - - fodf_descoteaux07_sub.nii.gz +- bids_json.zip + - result_real_dwi_real_rev_dwi_sbref.json + - result_real_dwi_real_rev_dwi.json + - result_complex_dwi_complex_rev_dwi_sbref.json + - result_real_dwi_epi.json + - result_complex_dwi_complex_rev_dwi.json + - result_complex_dwi_epi.json + - result_real_dwi_real_sbref.json + - result_complex_dwi_complex_sbref.json +- plot.zip + - atlas_brainnetome.nii.gz + - ad.nii.gz + - fa.nii.gz + - map_gm.nii.gz + - map_wm.nii.gz + - mask_gm.nii.gz + - mask_wm.nii.gz + - atlas_brainnetome.json +- ihMT.zip + - B1map.json + - B1map.nii.gz + - echo-1_acq-altnp_ihmt.json + - echo-1_acq-altnp_ihmt.nii.gz + - echo-1_acq-altpn_ihmt.json + - echo-1_acq-altpn_ihmt.nii.gz + - echo-1_acq-mtoff_ihmt.json + - echo-1_acq-mtoff_ihmt.nii.gz + - echo-1_acq-neg_ihmt.json + - echo-1_acq-neg_ihmt.nii.gz + - echo-1_acq-pos_ihmt.json + - echo-1_acq-pos_ihmt.nii.gz + - echo-1_acq-T1w_ihmt.json + - echo-1_acq-T1w_ihmt.nii.gz + - echo-2_acq-altnp_ihmt.json + - echo-2_acq-altnp_ihmt.nii.gz + - echo-2_acq-altpn_ihmt.json + - echo-2_acq-altpn_ihmt.nii.gz + - echo-2_acq-mtoff_ihmt.json + - echo-2_acq-mtoff_ihmt.nii.gz + - echo-2_acq-neg_ihmt.json + - echo-2_acq-neg_ihmt.nii.gz + - echo-2_acq-pos_ihmt.json + - echo-2_acq-pos_ihmt.nii.gz + - echo-2_acq-T1w_ihmt.json + - echo-2_acq-T1w_ihmt.nii.gz + - echo-3_acq-altnp_ihmt.json + - echo-3_acq-altnp_ihmt.nii.gz + - echo-3_acq-altpn_ihmt.json + - echo-3_acq-altpn_ihmt.nii.gz + - echo-3_acq-mtoff_ihmt.json + - echo-3_acq-mtoff_ihmt.nii.gz + - echo-3_acq-neg_ihmt.json + - echo-3_acq-neg_ihmt.nii.gz + - echo-3_acq-pos_ihmt.json + - echo-3_acq-pos_ihmt.nii.gz + - echo-3_acq-T1w_ihmt.json + - echo-3_acq-T1w_ihmt.nii.gz + - mask_resample.nii.gz +- MT.zip + - mask.nii.gz + - sub-001_echo-1_acq-mtoff_mtsat.json + - sub-001_echo-1_acq-mtoff_mtsat.nii.gz + - sub-001_echo-1_acq-mton_mtsat.json + - sub-001_echo-1_acq-mton_mtsat.nii.gz + - sub-001_echo-1_acq-t1w_mtsat.json + - sub-001_echo-1_acq-t1w_mtsat.nii.gz + - sub-001_echo-2_acq-mtoff_mtsat.json + - sub-001_echo-2_acq-mtoff_mtsat.nii.gz + - sub-001_echo-2_acq-mton_mtsat.json + - sub-001_echo-2_acq-mton_mtsat.nii.gz + - sub-001_echo-2_acq-t1w_mtsat.json + - sub-001_echo-2_acq-t1w_mtsat.nii.gz + - sub-001_echo-3_acq-mtoff_mtsat.json + - sub-001_echo-3_acq-mtoff_mtsat.nii.gz + - sub-001_echo-3_acq-mton_mtsat.json + - sub-001_echo-3_acq-mton_mtsat.nii.gz + - sub-001_echo-3_acq-t1w_mtsat.json + - sub-001_echo-3_acq-t1w_mtsat.nii.gz + - sub-001_echo-4_acq-mtoff_mtsat.json + - 
sub-001_echo-4_acq-mtoff_mtsat.nii.gz + - sub-001_echo-4_acq-mton_mtsat.json + - sub-001_echo-4_acq-mton_mtsat.nii.gz + - sub-001_echo-4_acq-t1w_mtsat.json + - sub-001_echo-4_acq-t1w_mtsat.nii.gz + - sub-001_echo-5_acq-mtoff_mtsat.json + - sub-001_echo-5_acq-mtoff_mtsat.nii.gz + - sub-001_echo-5_acq-mton_mtsat.json + - sub-001_echo-5_acq-mton_mtsat.nii.gz + - sub-001_echo-5_acq-t1w_mtsat.json + - sub-001_echo-5_acq-t1w_mtsat.nii.gz + - sub-001_run-01_B1map.json + - sub-001_run-01_B1map.nii.gz +- atlas.zip + - atlas_freesurfer_v2.nii.gz + - atlas_freesurfer_v2_LUT.json + - atlas_freesurfer_v2_labels_list.txt + - atlas_freesurfer_v2_no_brainstem.nii.gz + - atlas_freesurfer_v2_single_brainstem.nii.gz + - atlas_freesurfer_v2_single_brainstem_dil.nii.gz + - bash.sh + - brainstem.nii.gz + - brainstem_173.nii.gz + - brainstem_174.nii.gz + - brainstem_175.nii.gz + - brainstem_bin.nii.gz + - 10.nii.gz + - 1000.nii.gz + - 1001.nii.gz + - 1002.nii.gz + - 1003.nii.gz + - 1005.nii.gz + - 1006.nii.gz + - 1008.nii.gz + - 1009.nii.gz + - 1010.nii.gz + - 1011.nii.gz + - 1012.nii.gz + - 1013.nii.gz + - 1014.nii.gz + - 1015.nii.gz + - 1016.nii.gz + - 1017.nii.gz + - 1018.nii.gz + - 1019.nii.gz + - 1020.nii.gz + - 1021.nii.gz + - 1022.nii.gz + - 1023.nii.gz + - 1024.nii.gz + - 1025.nii.gz + - 1026.nii.gz + - 1027.nii.gz + - 1028.nii.gz + - 1029.nii.gz + - 1030.nii.gz + - 1031.nii.gz + - 1032.nii.gz + - 1033.nii.gz + - 1034.nii.gz + - 1035.nii.gz + - 11.nii.gz + - 12.nii.gz + - 13.nii.gz + - 17.nii.gz + - 173.nii.gz + - 174.nii.gz + - 175.nii.gz + - 18.nii.gz + - 2000.nii.gz + - 2001.nii.gz + - 2002.nii.gz + - 2003.nii.gz + - 2005.nii.gz + - 2006.nii.gz + - 2007.nii.gz + - 2008.nii.gz + - 2009.nii.gz + - 2010.nii.gz + - 2011.nii.gz + - 2012.nii.gz + - 2013.nii.gz + - 2014.nii.gz + - 2015.nii.gz + - 2016.nii.gz + - 2017.nii.gz + - 2018.nii.gz + - 2019.nii.gz + - 2020.nii.gz + - 2021.nii.gz + - 2022.nii.gz + - 2023.nii.gz + - 2024.nii.gz + - 2025.nii.gz + - 2026.nii.gz + - 2027.nii.gz + - 2028.nii.gz + - 2029.nii.gz + - 2030.nii.gz + - 2031.nii.gz + - 2032.nii.gz + - 2033.nii.gz + - 2034.nii.gz + - 2035.nii.gz + - 26.nii.gz + - 28.nii.gz + - 47.nii.gz + - 49.nii.gz + - 50.nii.gz + - 51.nii.gz + - 52.nii.gz + - 53.nii.gz + - 54.nii.gz + - 58.nii.gz + - 60.nii.gz + - 8.nii.gz + - 85.nii.gz + - 8_10.nii.gz + - left_accumbens_area.nii.gz + - left_amygdala.nii.gz + - left_banks_superior_temporal.nii.gz + - left_caudal_anterior_cingulate.nii.gz + - left_caudal_middle_fontral.nii.gz + - left_caudate.nii.gz + - left_cerebellum_cortex.nii.gz + - left_corpus_callosum.nii.gz + - left_cuneus.nii.gz + - left_entorhinal.nii.gz + - left_frontal_pole.nii.gz + - left_fusiform.nii.gz + - left_hippocampus.nii.gz + - left_inferior_parietal.nii.gz + - left_inferior_temporal.nii.gz + - left_insula.nii.gz + - left_isthmus_cingulate.nii.gz + - left_lateral_occipital.nii.gz + - left_laterral_orbitofrontal.nii.gz + - left_lingual.nii.gz + - left_medial_orbitofrontal.nii.gz + - left_middle_temporal.nii.gz + - left_pallidum.nii.gz + - left_paracentral.nii.gz + - left_parahipocampal.nii.gz + - left_parsopercularis.nii.gz + - left_parsorbitalis.nii.gz + - left_parstriangularis.nii.gz + - left_pericalcarine.nii.gz + - left_postcentral.nii.gz + - left_posteriorcingulate.nii.gz + - left_precentral.nii.gz + - left_precuneus.nii.gz + - left_putamen.nii.gz + - left_rostral_anterior_cingulate.nii.gz + - left_rostral_middle_frontal.nii.gz + - left_superior_frontral.nii.gz + - left_superior_parietal.nii.gz + - left_superior_temporal.nii.gz + - 
left_supra_marginal.nii.gz + - left_temporal_pole.nii.gz + - left_thalamus_proper.nii.gz + - left_transverse_temporal.nii.gz + - left_ventraldc.nii.gz + - midbrain.nii.gz + - oblongata.nii.gz + - pons.nii.gz + - right_accumbens_area.nii.gz + - right_amygdala.nii.gz + - right_banks_superior_temporal.nii.gz + - right_caudal_anterior_cingulate.nii.gz + - right_caudal_middle_fontral.nii.gz + - right_caudate.nii.gz + - right_cerebellum_cortex.nii.gz + - right_corpus_callosum.nii.gz + - right_cuneus.nii.gz + - right_entorhinal.nii.gz + - right_frontal_pole.nii.gz + - right_fusiform.nii.gz + - right_hippocampus.nii.gz + - right_inferior_parietal.nii.gz + - right_inferior_temporal.nii.gz + - right_insula.nii.gz + - right_isthmus_cingulate.nii.gz + - right_lateral_occipital.nii.gz + - right_laterral_orbitofrontal.nii.gz + - right_lingual.nii.gz + - right_medial_orbitofrontal.nii.gz + - right_middle_temporal.nii.gz + - right_pallidum.nii.gz + - right_paracentral.nii.gz + - right_parahipocampal.nii.gz + - right_parsopercularis.nii.gz + - right_parsorbitalis.nii.gz + - right_parstriangularis.nii.gz + - right_pericalcarine.nii.gz + - right_postcentral.nii.gz + - right_posteriorcingulate.nii.gz + - right_precentral.nii.gz + - right_precuneus.nii.gz + - right_putamen.nii.gz + - right_rostral_anterior_cingulate.nii.gz + - right_rostral_middle_frontal.nii.gz + - right_superior_frontral.nii.gz + - right_superior_parietal.nii.gz + - right_superior_temporal.nii.gz + - right_supra_marginal.nii.gz + - right_temporal_pole.nii.gz + - right_thalamus_proper.nii.gz + - right_transverse_temporal.nii.gz + - right_ventraldc.nii.gz +- bst.zip + - todi_mask.nii.gz + - template_lin.nii.gz + - rpt_m_warp.trk + - rpt_m_lin.trk + - output1InverseWarp.nii.gz + - output0GenericAffine.mat + - out_lw_tdi.nii.gz + - mask.nii.gz + - lw_todi_sh.nii.gz + - fodf.nii.gz + - fa.nii.gz + - bash.sh + - template0.nii.gz + - rpt_m.trk + - todi_mask.nii.gz + - priors.nii.gz + - endpoints_mask.nii.gz + - efod.nii.gz +- bundles.zip + - transformation_rigid.txt + - sagittal_glass.png + - sagittal_3d.png + - coronal_glass.png + - coronal_3d.png + - clusters.pkl + - bundle_all_1mm_ic.trk + - bundle_all_1mm.trk + - bundle_all_1mm.nii.gz + - bundle_0_similarity.json + - bundle_0_reco.tck + - bundle_0_measures.json + - bundle_0_binary.json + - bash.sh + - axial_glass.png + - axial_3d.png + - affine.txt + - results.json + - logfile.txt + - bundle_6.trk + - bundle_5.trk + - bundle_4.trk + - bundle_3.trk + - bundle_2.trk + - bundle_1.trk + - bundle_0.trk + - default_config_sim.json + - bundle_all_1mm.nii.gz + - bundle_6.trk + - bundle_5.trk + - bundle_4.trk + - bundle_3.trk + - bundle_2.trk + - bundle_1.trk + - bundle_0.trk + - bundle_6.trk + - bundle_5.trk + - bundle_4.trk + - bundle_3.trk + - bundle_2.trk + - bundle_1.trk + - bundle_0.trk + - bundle_6.trk + - bundle_5.trk + - bundle_4.trk + - bundle_3.trk + - bundle_2.trk + - bundle_1.trk + - bundle_0.trk +- commit_amico.zip + - cmd + - tracking.trk + - dwi.bvec + - dwi.bval + - wm_frf.txt + - gm_frf.txt + - csf_frf.txt + - peaks.nii.gz + - mask.nii.gz + - md.nii.gz + - fa.nii.gz + - dwi.nii.gz + - ad.nii.gz +- connectivity.zip + - vol.npy + - success.txt + - sim.npy + - sc_thr.npy + - sc_reo_fake.npy + - sc_normalize.npy + - sc_norm_vol.npy + - sc_norm.png + - sc_norm.npy + - sc_masked.npy + - sc_lower_threshold.npy + - sc_add_10.npy + - sc.npy + - reorder.txt + - pval.npy + - mask.npy + - len.npy + - labels_list.txt + - gtm.json + - fodf.nii.gz + - endpoints_atlas.nii.gz + - decompose_afd_rd.h5 + 
- decompose.h5 + - bundle_all_1mm.trk + - bash.sh + - affine.txt + - afd_max.npy + - afd_max.nii.gz + - 9_9.trk + - 5_6.trk + - 4_8.trk + - 3_7.trk + - 2_7.trk + - 1_7.trk + - 1_10.trk + - 9_9.nii.gz + - 5_6.nii.gz + - 4_8.nii.gz + - 3_7.nii.gz + - 2_7.nii.gz + - 1_7.nii.gz + - 1_10.nii.gz +- filtering.zip + - voting_voxels.nii.gz + - voting_streamlines.trk + - sc.bdo + - mosaic.png + - mask.nii.gz + - centroids.trk + - bundle_all_1mm_inliers.trk + - bundle_all_1mm.trk + - bundle_all_1mm.nii.gz + - bundle_4_filtered_no_loops.trk + - bundle_4_filtered.trk + - bundle_4_filtered.nii.gz + - bundle_4.trk + - bash.sh + - cluster_9.trk + - cluster_8.trk + - cluster_7.trk + - cluster_6.trk + - cluster_5.trk + - cluster_4.trk + - cluster_3.trk + - cluster_2.trk + - cluster_19.trk + - cluster_18.trk + - cluster_17.trk + - cluster_16.trk + - cluster_15.trk + - cluster_14.trk + - cluster_13.trk + - cluster_12.trk + - cluster_11.trk + - cluster_10.trk + - cluster_1.trk + - cluster_0.trk +- others.zip + - t1_reshape.nii.gz + - t1_resample.nii.gz + - t1_crop_denoised.nii.gz + - t1_crop.nii.gz + - t1.nii.gz + - rgb\_.nii.gz + - rgb.nii.gz + - fibercup_bundles_color.trk + - fibercup_bundles.trk + - fibercup_bundle_0_color.trk + - fibercup_bundle_0.trk + - fa_resample.nii.gz + - fa.nii.gz + - encoding.b + - empty.trk + - density.nii.gz + - bash.sh + - atlas_freesurfer_v2_LUT.json + - atlas_freesurfer_v2.nii.gz + - IFGWM_sub.trk + - IFGWM.trk +- processing.zip + - fodf_descoteaux07_sub_full.nii.gz + - fodf_descoteaux07_sub.nii.gz + - fd.nii.gz + - fodf_bingham.nii.gz + - dwi_noise_mask.nii.gz + - 1000.b + - 1000.bval + - 1000.bvec + - 1000_flip.b + - 3000.bval + - 3000.bvec + - ad.nii.gz + - ad_ransanc.nii.gz + - afd_max.nii.gz + - afd_sum.nii.gz + - afd_test.nii.gz + - afd_tot.nii.gz + - b0_mean.nii.gz + - b0_mean_n4.nii.gz + - bias_field_b0.nii.gz + - diff.nii.gz + - dki_ad.nii.gz + - dki_fa.nii.gz + - dki_md.nii.gz + - dki_rd.nii.gz + - dwi.bval + - dwi.bvec + - dwi.nii.gz + - dwi_crop.nii.gz + - dwi_crop_1000.nii.gz + - dwi_crop_3000.nii.gz + - dwi_crop_n4.nii.gz + - fa.nii.gz + - fa_low.nii.gz + - fa_thr.nii.gz + - fodf.nii.gz + - fodf_descoteaux07.nii.gz + - frf.txt + - md.nii.gz + - mfrf.txt + - mni_masked_2x2x2.nii.gz + - nfrf.txt + - nufo.nii.gz + - peaks.nii.gz + - rd.nii.gz + - rd_test.nii.gz + - seed.nii.gz + - sh.nii.gz + - sh_1000.nii.gz + - sh_3000.nii.gz + - tracking.trk + - cc.nii.gz + - dwi_graph.png + - dwi_SNR.json + - small_roi_gm_mask.nii.gz + - axial.png + - coronal.png + - sagittal.png +- surface_vtk_fib.zip + - rhpialt.vtk + - log.txt + - lhpialt_smooth.vtk + - lhpialt_lin.vtk + - lhpialt.vtk + - lh.pialt_xform + - gyri_fanning_c.trk + - gyri_fanning.trk + - gyri_fanning.tck + - gyri_fanning.fib + - fa_flip.nii.gz + - fa.nii.gz + - bash.sh + - affine.txt +- tracking.zip + - interface.nii.gz + - bash.sh + - fa.nii.gz + - fodf.nii.gz + - local.trk + - local_split_0.trk + - local_split_1.trk + - local_split_2.trk + - map_csf.nii.gz + - map_exclude.nii.gz + - map_exclude_corr.nii.gz + - map_gm.nii.gz + - map_include.nii.gz + - map_include_corr.nii.gz + - map_wm.nii.gz + - peaks.nii.gz + - pft.trk + - seeding_mask.nii.gz + - union.trk + - union_shuffle.trk + - union_shuffle_sub.trk + - union_shuffle_sub_smooth.trk +- tractograms.zip + - bundle_4_cut_endpoints.tck + - bundle_4_cut_center.tck + - bundle_4_endpoints_5points.nii.gz + - bundle_4_center.nii.gz + - bundle_4_head_tail.nii.gz + - bundle_4_head_tail_offset.nii.gz + - bundle_4.tck + - bundle_4_endpoints_1point.nii.gz + - 
bundle_4_wm.nii.gz + - bundle_4_endpoints_1point.nii.gz + - bundle_4_cut_endpoints.tck + - bundle_4_center.nii.gz + - bundle_4_head_tail.nii.gz + - bundle_4_head_tail_offset.nii.gz + - bundle_4.tck + - bundle_4_wm.nii.gz + - bundle_4_cut_center.tck + - bundle_4_endpoints_5points.nii.gz +- tractometry.zip + - tail.nii.gz + - mni_masked.nii.gz + - metric_label.json + - length_stats_2.json + - length_stats_1.json + - length_stats.xlsx + - label.npz + - head.nii.gz + - distance.npz + - bash.sh + - IFGWM_uni_c_10.trk + - IFGWM_uni_c.trk + - IFGWM_uni.trk + - IFGWM_labels_map.nii.gz + - IFGWM_color.trk + - IFGWM.trk + - IFGWM.nii.gz + - IFGWM_mni_masked.png +- stats.zip + - participants.tsv + - meanstd_all.json + - sub-2120_rd.npy + - sub-2120_nufo.npy + - sub-2120_md.npy + - sub-2120_fa.npy + - sub-2120_afd_total.npy + - sub-2120_afd_fixel.npy + - sub-2120_ad.npy + - sub-1230_rd.npy + - sub-1230_nufo.npy + - sub-1230_md.npy + - sub-1230_fa.npy + - sub-1230_afd_total.npy + - sub-1230_afd_fixel.npy + - sub-1230_ad.npy + - sub-1108_rd.npy + - sub-1108_nufo.npy + - sub-1108_md.npy + - sub-1108_fa.npy + - sub-1108_afd_total.npy + - sub-1108_afd_fixel.npy + - sub-1108_ad.npy + - sub-1005_rd.npy + - sub-1005_nufo.npy + - sub-1005_md.npy + - sub-1005_fa.npy + - sub-1005_afd_total.npy + - sub-1005_afd_fixel.npy + - sub-1005_ad.npy + - list_id.txt +- anatomical_filtering.zip + - tractogram_filter_ana.trk + - wmparc_filter_ana.nii.gz +- btensor_testdata.zip + - wm_frf.txt + - spherical.bvecs + - spherical.bvals + - planar.bvecs + - planar.bvals + - md.nii.gz + - linear.bvecs + - linear.bvals + - gm_frf.txt + - fa.nii.gz + - dwi_spherical.nii.gz + - dwi_planar.nii.gz + - dwi_linear.nii.gz + - csf_frf.txt +- fodf_filtering.zip + - fodf_descoteaux07_sub_twice.nii.gz + - fodf_descoteaux07_sub_sym.nii.gz + - fodf_descoteaux07_sub_full.nii.gz + - fodf_descoteaux07_sub.nii.gz diff --git a/docs/pull_request_template.md b/docs/pull_request_template.md index becc2918d..6b7e8ff4e 100644 --- a/docs/pull_request_template.md +++ b/docs/pull_request_template.md @@ -6,13 +6,13 @@ - Create the tool: - [ ] Edit `./modules/nf-scil///main.nf` - - [ ] Edit `./modules/nf-scil///meta.yml` + - [ ] Edit `./modules/nf-scil///meta.yml` - Generate the tests: - [ ] Edit `./tests/modules/nf-scil///main.nf` - [ ] Edit `./tests/modules/nf-scil///nextflow.config` - [ ] Add test data locally for tests with the fork repository - - [ ] Generate the test infrastructure and *md5sum* for all outputs - - [ ] Test the module in a **REAL** DSL2 pipeline or a *dummy* one + - [ ] Generate the test infrastructure and _md5sum_ for all outputs + - [ ] Test the module in a **REAL** DSL2 pipeline or a _dummy_ one - Ensure the syntax is correct : - [ ] Check indentation abides with the rest of the library (don't hesitate to correct others !) - [ ] Lint everything. Ensure your variables have good names. diff --git a/modules/nf-scil/betcrop/fslbetcrop/meta.yml b/modules/nf-scil/betcrop/fslbetcrop/meta.yml index ffe9c5ed8..a047812ba 100644 --- a/modules/nf-scil/betcrop/fslbetcrop/meta.yml +++ b/modules/nf-scil/betcrop/fslbetcrop/meta.yml @@ -58,7 +58,7 @@ output: - bbox: type: file - description: DWI BoundingBox used for cropping. + description: DWI BoundingBox used for cropping. 
pattern: "*dwi_boundingBox.pkl" - versions: diff --git a/modules/nf-scil/denoising/mppca/meta.yml b/modules/nf-scil/denoising/mppca/meta.yml index 753824015..ea3dc9b87 100644 --- a/modules/nf-scil/denoising/mppca/meta.yml +++ b/modules/nf-scil/denoising/mppca/meta.yml @@ -1,7 +1,7 @@ --- # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json name: "denoising_mppca" -description: denoise a dataset with the Marchenko-Pastur principal component analysis +description: denoise a dataset with the Marchenko-Pastur principal component analysis keywords: - nifti - denoising diff --git a/modules/nf-scil/preproc/gibbs/meta.yml b/modules/nf-scil/preproc/gibbs/meta.yml index 494efb967..fab72e822 100644 --- a/modules/nf-scil/preproc/gibbs/meta.yml +++ b/modules/nf-scil/preproc/gibbs/meta.yml @@ -18,7 +18,7 @@ input: description: | Groovy Map containing sample information e.g. `[ id:'test', single_end:false ]` - + - dwi: type: file description: Nifti image file to correct diff --git a/modules/nf-scil/preproc/n4/meta.yml b/modules/nf-scil/preproc/n4/meta.yml index 055dae56c..59b140cd2 100644 --- a/modules/nf-scil/preproc/n4/meta.yml +++ b/modules/nf-scil/preproc/n4/meta.yml @@ -17,7 +17,7 @@ input: description: | Groovy Map containing sample information e.g. `[ id:'test', single_end:false ]` - + - dwi: type: file description: Nifti image file to correct @@ -31,7 +31,7 @@ input: - b0_mask: type: file description: Nifti image file mask - pattern: "*.{nii,nii.gz}" + pattern: "*.{nii,nii.gz}" output: - meta: diff --git a/modules/nf-scil/reconst/dtimetrics/meta.yml b/modules/nf-scil/reconst/dtimetrics/meta.yml index 7e623e4f6..1bc1bc21f 100644 --- a/modules/nf-scil/reconst/dtimetrics/meta.yml +++ b/modules/nf-scil/reconst/dtimetrics/meta.yml @@ -19,7 +19,7 @@ input: description: | Groovy Map containing sample information e.g. `[ id:'test', single_end:false ]` - + - dwi: type: file description: Nifti DWI volume used to extract DTI metrics. @@ -134,16 +134,16 @@ output: - nonphysical: type: file - description: Output filename for the voxels with physically implausible - signals where the mean of b=0 images is below one or more - diffusion-weighted images. + description: Output filename for the voxels with physically implausible + signals where the mean of b=0 images is below one or more + diffusion-weighted images. pattern: "*__nonphysical.{nii,nii.gz}" - pulsation_std_dwi: type: file - description: Standard deviation map across all diffusion-weighted images - and across b=0 images if more than one is available.Shows - pulsation and misalignment artifacts. + description: Standard deviation map across all diffusion-weighted images + and across b=0 images if more than one is available.Shows + pulsation and misalignment artifacts. pattern: "*__pulsation_std_dwi.{nii,nii.gz}" - residual: diff --git a/modules/nf-scil/reconst/frf/meta.yml b/modules/nf-scil/reconst/frf/meta.yml index a1f875495..b0210067f 100644 --- a/modules/nf-scil/reconst/frf/meta.yml +++ b/modules/nf-scil/reconst/frf/meta.yml @@ -21,7 +21,7 @@ input: description: | Groovy Map containing sample information e.g. `[ id:'test', single_end:false ]` - + - dwi: type: file description: Nifti image of DWI. 
diff --git a/modules/nf-scil/reconst/meanfrf/meta.yml b/modules/nf-scil/reconst/meanfrf/meta.yml index 5e64e4ac5..a37a30d5c 100644 --- a/modules/nf-scil/reconst/meanfrf/meta.yml +++ b/modules/nf-scil/reconst/meanfrf/meta.yml @@ -2,7 +2,7 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json name: "reconst_meanfrf" description: Compute the mean Fiber Response Function from a set of individually - computed Response Functions. + computed Response Functions. keywords: - FRF @@ -12,14 +12,12 @@ tools: homepage: "https://github.com/scilus/scilpy.git" input: - - frf_list: type: Path description: List of individual Fiber Response Function (FRF) path. pattern: "*.txt" output: - - meanfrf: type: file description: Averaged Fiber Response Function (FRF). diff --git a/modules/nf-scil/reconst/noddi/meta.yml b/modules/nf-scil/reconst/noddi/meta.yml index d2813ec84..105372982 100644 --- a/modules/nf-scil/reconst/noddi/meta.yml +++ b/modules/nf-scil/reconst/noddi/meta.yml @@ -92,4 +92,4 @@ output: pattern: "versions.yml" authors: - - "@Manonedde" \ No newline at end of file + - "@Manonedde" diff --git a/modules/nf-scil/segmentation/fastseg/meta.yml b/modules/nf-scil/segmentation/fastseg/meta.yml index b3c689982..440c8fd7f 100644 --- a/modules/nf-scil/segmentation/fastseg/meta.yml +++ b/modules/nf-scil/segmentation/fastseg/meta.yml @@ -35,7 +35,7 @@ output: - wm_mask: type: file - description: Nifti WM mask volume. + description: Nifti WM mask volume. pattern: "*.{nii,nii.gz}" - gm_mask: diff --git a/modules/nf-scil/segmentation/freesurferseg/meta.yml b/modules/nf-scil/segmentation/freesurferseg/meta.yml index d59aad005..cb1663a7f 100644 --- a/modules/nf-scil/segmentation/freesurferseg/meta.yml +++ b/modules/nf-scil/segmentation/freesurferseg/meta.yml @@ -19,10 +19,10 @@ input: description: | Groovy Map containing sample information e.g. `[ id:'test', single_end:false ]` - + - aparc_aseg: type: file - description: FreeSurfer cortical and subcortical parcellation output file. + description: FreeSurfer cortical and subcortical parcellation output file. pattern: "*.mgz" - wmparc: @@ -44,12 +44,12 @@ output: - gm_mask: type: file - description: Nifti GM mask file. + description: Nifti GM mask file. pattern: "*.{nii,nii.gz}" - csf_mask: type: file - description: Nifti CSF mask file. + description: Nifti CSF mask file. pattern: "*.{nii,nii.gz}" - versions: diff --git a/modules/nf-scil/testdata/scilpy/meta.yml b/modules/nf-scil/testdata/scilpy/meta.yml index 1d877ec82..f5e7edd05 100644 --- a/modules/nf-scil/testdata/scilpy/meta.yml +++ b/modules/nf-scil/testdata/scilpy/meta.yml @@ -22,14 +22,14 @@ input: - test_data_path: type: file description: Directory where to store the test data - pattern: "*" + pattern: "*" output: - test_data_path: type: file description: Directory where to store the test data - pattern: "*" - + pattern: "*" + - versions: type: file description: File containing software versions diff --git a/modules/nf-scil/tracking/pfttracking/meta.yml b/modules/nf-scil/tracking/pfttracking/meta.yml index c891c1835..d2b8c5167 100644 --- a/modules/nf-scil/tracking/pfttracking/meta.yml +++ b/modules/nf-scil/tracking/pfttracking/meta.yml @@ -2,9 +2,9 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json name: "tracking_pfttracking" description: Compute include and exclude maps, and the seeding mask - from partial volume estimation (PVE) maps. 
- Generates a tractogram using anatomically-constrained particle - filter tracking, Particle Filtering Tractography (PFT). + from partial volume estimation (PVE) maps. + Generates a tractogram using anatomically-constrained particle + filter tracking, Particle Filtering Tractography (PFT). keywords: - PFT - tractography diff --git a/modules/nf-scil/utils/extractb0/meta.yml b/modules/nf-scil/utils/extractb0/meta.yml index bd2141e15..4073264ef 100644 --- a/modules/nf-scil/utils/extractb0/meta.yml +++ b/modules/nf-scil/utils/extractb0/meta.yml @@ -18,7 +18,7 @@ input: description: | Groovy Map containing sample information e.g. `[ id:'test', single_end:false ]` - + - dwi: type: file description: Nifti DWI volume to perform BET + crop. diff --git a/nf-scil-extensions/.vscode/launch.json b/nf-scil-extensions/.vscode/launch.json index 0e191b592..ace09d79f 100644 --- a/nf-scil-extensions/.vscode/launch.json +++ b/nf-scil-extensions/.vscode/launch.json @@ -3,15 +3,13 @@ // Hover to view descriptions of existing attributes. // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 { - "version": "0.2.0", + "version": "0.2.0", "configurations": [ { "name": "Extension", "type": "extensionHost", "request": "launch", - "args": [ - "--extensionDevelopmentPath=${workspaceFolder}" - ] + "args": ["--extensionDevelopmentPath=${workspaceFolder}"] } ] -} \ No newline at end of file +} diff --git a/nf-scil-extensions/package.json b/nf-scil-extensions/package.json index 4d250188b..3b190d55c 100644 --- a/nf-scil-extensions/package.json +++ b/nf-scil-extensions/package.json @@ -1,51 +1,51 @@ { - "name": "nf-scil-extensions", - "displayName": "nf-scil-extensions", - "description": "Extension pack for VS Code development of nf-scil", - "repository": { - "type": "git", - "url": "https://github.com/scilus/nf-scil.git" - }, - "icon": "icon.avif", - "version": "0.0.2", - "publisher": "AlexVCaron", - "engines": { - "vscode": "^1.84.0" - }, - "categories": [ - "Extension Packs" - ], - "extensionPack": [ - "bierner.github-markdown-preview", - "bierner.markdown-checkbox", - "bierner.markdown-emoji", - "bierner.markdown-footnotes", - "bierner.markdown-mermaid", - "bierner.markdown-preview-github-styles", - "bierner.markdown-yaml-preamble", - "github.vscode-github-actions", - "GitHub.vscode-pull-request-github", - "mhutchie.git-graph", - "ms-azuretools.vscode-docker", - "ms-python.isort", - "ms-python.python", - "ms-python.vscode-pylance", - "ms-vscode-remote.remote-containers", - "ms-vscode-remote.remote-ssh", - "ms-vscode-remote.remote-ssh-edit", - "ms-vscode-remote.remote-wsl", - "ms-vscode-remote.vscode-remote-extensionpack", - "ms-vscode.cmake-tools", - "ms-vscode.cpptools", - "ms-vscode.cpptools-extension-pack", - "ms-vscode.cpptools-themes", - "ms-vscode.makefile-tools", - "ms-vscode.remote-explorer", - "ms-vscode.remote-server", - "ms-vsliveshare.vsliveshare", - "nf-core.nf-core-extensionpack", - "twxs.cmake", - "yahyabatulu.vscode-markdown-alert", - "yzhang.markdown-all-in-one" - ] + "name": "nf-scil-extensions", + "displayName": "nf-scil-extensions", + "description": "Extension pack for VS Code development of nf-scil", + "repository": { + "type": "git", + "url": "https://github.com/scilus/nf-scil.git" + }, + "icon": "icon.avif", + "version": "0.0.2", + "publisher": "AlexVCaron", + "engines": { + "vscode": "^1.84.0" + }, + "categories": [ + "Extension Packs" + ], + "extensionPack": [ + "bierner.github-markdown-preview", + "bierner.markdown-checkbox", + "bierner.markdown-emoji", + 
"bierner.markdown-footnotes", + "bierner.markdown-mermaid", + "bierner.markdown-preview-github-styles", + "bierner.markdown-yaml-preamble", + "github.vscode-github-actions", + "GitHub.vscode-pull-request-github", + "mhutchie.git-graph", + "ms-azuretools.vscode-docker", + "ms-python.isort", + "ms-python.python", + "ms-python.vscode-pylance", + "ms-vscode-remote.remote-containers", + "ms-vscode-remote.remote-ssh", + "ms-vscode-remote.remote-ssh-edit", + "ms-vscode-remote.remote-wsl", + "ms-vscode-remote.vscode-remote-extensionpack", + "ms-vscode.cmake-tools", + "ms-vscode.cpptools", + "ms-vscode.cpptools-extension-pack", + "ms-vscode.cpptools-themes", + "ms-vscode.makefile-tools", + "ms-vscode.remote-explorer", + "ms-vscode.remote-server", + "ms-vsliveshare.vsliveshare", + "nf-core.nf-core-extensionpack", + "twxs.cmake", + "yahyabatulu.vscode-markdown-alert", + "yzhang.markdown-all-in-one" + ] } diff --git a/nf-scil-extensions/vsc-extension-quickstart.md b/nf-scil-extensions/vsc-extension-quickstart.md index 2086acfec..1b1f14a4f 100644 --- a/nf-scil-extensions/vsc-extension-quickstart.md +++ b/nf-scil-extensions/vsc-extension-quickstart.md @@ -2,20 +2,20 @@ ## What's in the folder -* This folder contains all of the files necessary for your extension pack. -* `package.json` - this is the manifest file that defines the list of extensions of the extension pack. +- This folder contains all of the files necessary for your extension pack. +- `package.json` - this is the manifest file that defines the list of extensions of the extension pack. ## Get up and running straight away -* Press `F5` to open a new window with your extension loaded. -* Open `Extensions Viewlet` and check your extensions are installed. +- Press `F5` to open a new window with your extension loaded. +- Open `Extensions Viewlet` and check your extensions are installed. ## Make changes -* You can relaunch the extension from the debug toolbar after making changes to the files listed above. -* You can also reload (`Ctrl+R` or `Cmd+R` on Mac) the VS Code window with your extension to load your changes. +- You can relaunch the extension from the debug toolbar after making changes to the files listed above. +- You can also reload (`Ctrl+R` or `Cmd+R` on Mac) the VS Code window with your extension to load your changes. ## Install your extension -* To start using your extension with Visual Studio Code copy it into the `/.vscode/extensions` folder and restart Code. -* To share your extension with the world, read on https://code.visualstudio.com/docs about publishing an extension. +- To start using your extension with Visual Studio Code copy it into the `/.vscode/extensions` folder and restart Code. +- To share your extension with the world, read on https://code.visualstudio.com/docs about publishing an extension. 
diff --git a/tests/modules/nf-scil/tracking/localtracking/test.yml b/tests/modules/nf-scil/tracking/localtracking/test.yml index 612414541..f3c40f62a 100644 --- a/tests/modules/nf-scil/tracking/localtracking/test.yml +++ b/tests/modules/nf-scil/tracking/localtracking/test.yml @@ -8,7 +8,7 @@ md5sum: e4e926fce399749c2cd68c92eccf04be - path: output/wm/test__local_tracking.trk contains: - - '# TODO nf-core: file md5sum was variable, please replace this text with a string found in the file instead ' + - "# TODO nf-core: file md5sum was variable, please replace this text with a string found in the file instead " - path: output/wm/test__local_tracking_config.json md5sum: c250bbac61d4f4a377d29dc0be4efada - path: output/wm/test__local_tracking_mask.nii.gz @@ -25,7 +25,7 @@ md5sum: 49c19bc6c0f47af32dea7af992b40490 - path: output/fa/test__local_tracking.trk contains: - - '# TODO nf-core: file md5sum was variable, please replace this text with a string found in the file instead ' + - "# TODO nf-core: file md5sum was variable, please replace this text with a string found in the file instead " - path: output/fa/test__local_tracking_config.json md5sum: 2e37c9f077b7d1422c54b39be37590db - path: output/fa/test__local_tracking_mask.nii.gz From 7a72b39a006786157f0147ff9818b08e68bbc3c8 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Mon, 8 Jan 2024 21:38:56 +0000 Subject: [PATCH 17/41] add other ignores for prettier --- .prettierignore | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.prettierignore b/.prettierignore index 701ff1e7b..bf54d8208 100644 --- a/.prettierignore +++ b/.prettierignore @@ -8,3 +8,8 @@ nf_core/module-template/tests/tags.yml nf_core/subworkflow-template/tests/tags.yml .github .devcontainer +.vscode +venv +.venv +.test_data +.pytest_cache From 45495b3c4dd3d6fdcec42b6d880658836b31a46e Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Mon, 8 Jan 2024 22:00:17 +0000 Subject: [PATCH 18/41] fix editorconfig --- .devcontainer/Dockerfile | 22 ++++---- .github/ISSUE_TEMPLATE/bug_report.md | 6 +-- .gitignore | 2 +- docs/generate_scil_data_md.py | 4 +- modules/nf-scil/betcrop/fslbetcrop/main.nf | 2 +- modules/nf-scil/denoising/mppca/main.nf | 2 +- modules/nf-scil/denoising/nlmeans/main.nf | 2 +- modules/nf-scil/reconst/fodf/main.nf | 4 +- modules/nf-scil/reconst/frf/main.nf | 3 +- modules/nf-scil/reconst/noddi/main.nf | 2 +- .../segmentation/freesurferseg/main.nf | 50 ++++++++++--------- .../nf-scil/tracking/localtracking/main.nf | 2 +- modules/nf-scil/tracking/pfttracking/main.nf | 2 +- 13 files changed, 53 insertions(+), 50 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index ab225177f..c2dad6abe 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,11 +1,11 @@ -FROM scilus/scilus:1.6.0 - -RUN apt update && apt install -y \ - git \ - python3-venv \ - wget \ - && rm -rf /var/lib/apt/lists/* - -RUN python3 -m pip install pipx && \ - python3 -m pipx ensurepath && \ - pipx install poetry +FROM scilus/scilus:1.6.0 + +RUN apt update && apt install -y \ + git \ + python3-venv \ + wget \ + && rm -rf /var/lib/apt/lists/* + +RUN python3 -m pip install pipx && \ + python3 -m pipx ensurepath && \ + pipx install poetry diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index c887f4986..8ed9b5239 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -8,9 +8,9 @@ assignees: AlexVCaron --- **System (complete the following information, else the issue will be closed):** - - OS: 
[e.g. Ubuntu, CentOS, other Linux distributions, MacOS, WSL, Windows, ...] - - Python version [e.g. 3.10, PyPa 3.7, CPython 3.9, ...] - - `nf-scil` version [e.g. 1.0.0, 1.2.post3, 0.1.dev0, main, feat/awesome_module, ...] + - OS: [e.g. Ubuntu, CentOS, other Linux distributions, MacOS, WSL, Windows, ...] + - Python version [e.g. 3.10, PyPa 3.7, CPython 3.9, ...] + - `nf-scil` version [e.g. 1.0.0, 1.2.post3, 0.1.dev0, main, feat/awesome_module, ...] **Describe the bug** A clear and concise description of what the bug is. diff --git a/.gitignore b/.gitignore index 9c575d5ed..f56ee8e99 100644 --- a/.gitignore +++ b/.gitignore @@ -6,4 +6,4 @@ *.vsix .DS_Store -*.code-workspace \ No newline at end of file +*.code-workspace diff --git a/docs/generate_scil_data_md.py b/docs/generate_scil_data_md.py index ae006cd4a..567215512 100644 --- a/docs/generate_scil_data_md.py +++ b/docs/generate_scil_data_md.py @@ -21,7 +21,7 @@ def _unpack_archive(_upload_root, _archive, _test_dict, _fetcher): _fetcher(_test_dict, _archive) return os.path.join(_upload_root, ".".join(_archive.split(".")[:-1])) - + def main(): @@ -69,4 +69,4 @@ def main(): if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/modules/nf-scil/betcrop/fslbetcrop/main.nf b/modules/nf-scil/betcrop/fslbetcrop/main.nf index ba25ac9ae..f1acaee0c 100644 --- a/modules/nf-scil/betcrop/fslbetcrop/main.nf +++ b/modules/nf-scil/betcrop/fslbetcrop/main.nf @@ -54,7 +54,7 @@ process BETCROP_FSLBETCROP { stub: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - + """ scil_extract_b0.py -h bet -h diff --git a/modules/nf-scil/denoising/mppca/main.nf b/modules/nf-scil/denoising/mppca/main.nf index c2ff182a5..15b6b1252 100644 --- a/modules/nf-scil/denoising/mppca/main.nf +++ b/modules/nf-scil/denoising/mppca/main.nf @@ -20,7 +20,7 @@ process DENOISING_MPPCA { script: def prefix = task.ext.prefix ?: "${meta.id}" def extent = task.ext.extent ? 
"-extent " + task.ext.extent : "" - + """ export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1 export OMP_NUM_THREADS=1 diff --git a/modules/nf-scil/denoising/nlmeans/main.nf b/modules/nf-scil/denoising/nlmeans/main.nf index c913d6290..097ba09b6 100644 --- a/modules/nf-scil/denoising/nlmeans/main.nf +++ b/modules/nf-scil/denoising/nlmeans/main.nf @@ -37,7 +37,7 @@ process DENOISING_NLMEANS { stub: def prefix = task.ext.prefix ?: "${meta.id}" - + """ scil_run_nlmeans.py -h diff --git a/modules/nf-scil/reconst/fodf/main.nf b/modules/nf-scil/reconst/fodf/main.nf index 0bff1bd66..52f1e42a3 100644 --- a/modules/nf-scil/reconst/fodf/main.nf +++ b/modules/nf-scil/reconst/fodf/main.nf @@ -52,11 +52,11 @@ process RECONST_FODF { scil_extract_dwi_shell.py $dwi $bval $bvec $fodf_shells \ dwi_fodf_shells.nii.gz bval_fodf_shells bvec_fodf_shells \ $dwi_shell_tolerance -f - + scil_compute_ssst_fodf.py dwi_fodf_shells.nii.gz bval_fodf_shells bvec_fodf_shells $frf ${prefix}__fodf.nii.gz \ $sh_order $sh_basis --force_b0_threshold \ $set_mask $processes - + scil_compute_fodf_max_in_ventricles.py ${prefix}__fodf.nii.gz $fa $md \ --max_value_output ventricles_fodf_max_value.txt $sh_basis \ $fa_threshold $md_threshold -f diff --git a/modules/nf-scil/reconst/frf/main.nf b/modules/nf-scil/reconst/frf/main.nf index 7e3856952..e2743767b 100644 --- a/modules/nf-scil/reconst/frf/main.nf +++ b/modules/nf-scil/reconst/frf/main.nf @@ -15,7 +15,6 @@ process RECONST_FRF { tuple val(meta), path("*__frf.txt") , emit: frf path "versions.yml" , emit: versions - when: task.ext.when == null || task.ext.when @@ -49,7 +48,7 @@ process RECONST_FRF { stub: def prefix = task.ext.prefix ?: "${meta.id}" - + """ scil_compute_ssst_frf.py -h scil_set_response_function.py -h diff --git a/modules/nf-scil/reconst/noddi/main.nf b/modules/nf-scil/reconst/noddi/main.nf index 45fbbfa03..f7fe963b8 100644 --- a/modules/nf-scil/reconst/noddi/main.nf +++ b/modules/nf-scil/reconst/noddi/main.nf @@ -37,7 +37,7 @@ process RECONST_NODDI { """ scil_compute_NODDI.py $dwi $bval $bvec $para_diff $iso_diff $lambda1 \ $lambda2 $nb_threads $b_thr $set_mask $set_kernels - + if [ ! 
-z "$kernels" ]; then mv results/FIT_dir.nii.gz ${prefix}__FIT_dir.nii.gz diff --git a/modules/nf-scil/segmentation/freesurferseg/main.nf b/modules/nf-scil/segmentation/freesurferseg/main.nf index f7bb15678..2cee624fa 100644 --- a/modules/nf-scil/segmentation/freesurferseg/main.nf +++ b/modules/nf-scil/segmentation/freesurferseg/main.nf @@ -39,36 +39,40 @@ process SEGMENTATION_FREESURFERSEG { scil_split_volume_by_labels.py aparc+aseg_int16.nii.gz --scilpy_lut freesurfer_subcortical --out_dir aparc+aseg_subcortical scil_image_math.py union wmparc_desikan/*\ - wmparc_subcortical/right-cerebellum-cortex.nii.gz\ - wmparc_subcortical/left-cerebellum-cortex.nii.gz\ - mask_cortex_m.nii.gz -f + wmparc_subcortical/right-cerebellum-cortex.nii.gz\ + wmparc_subcortical/left-cerebellum-cortex.nii.gz\ + mask_cortex_m.nii.gz -f + scil_image_math.py union wmparc_subcortical/corpus-callosum-*\ - aparc+aseg_subcortical/*white-matter*\ - wmparc_subcortical/brain-stem.nii.gz\ - aparc+aseg_subcortical/*ventraldc*\ - mask_wm_m.nii.gz -f + aparc+aseg_subcortical/*white-matter*\ + wmparc_subcortical/brain-stem.nii.gz\ + aparc+aseg_subcortical/*ventraldc*\ + mask_wm_m.nii.gz -f + scil_image_math.py union wmparc_subcortical/*thalamus*\ - wmparc_subcortical/*putamen*\ - wmparc_subcortical/*pallidum*\ - wmparc_subcortical/*hippocampus*\ - wmparc_subcortical/*caudate*\ - wmparc_subcortical/*amygdala*\ - wmparc_subcortical/*accumbens*\ - wmparc_subcortical/*plexus*\ - mask_nuclei_m.nii.gz -f + wmparc_subcortical/*putamen*\ + wmparc_subcortical/*pallidum*\ + wmparc_subcortical/*hippocampus*\ + wmparc_subcortical/*caudate*\ + wmparc_subcortical/*amygdala*\ + wmparc_subcortical/*accumbens*\ + wmparc_subcortical/*plexus*\ + mask_nuclei_m.nii.gz -f + scil_image_math.py union wmparc_subcortical/*-lateral-ventricle.nii.gz\ - wmparc_subcortical/*-inferior-lateral-ventricle.nii.gz\ - wmparc_subcortical/cerebrospinal-fluid.nii.gz\ - wmparc_subcortical/*th-ventricle.nii.gz\ - mask_csf_1_m.nii.gz -f + wmparc_subcortical/*-inferior-lateral-ventricle.nii.gz\ + wmparc_subcortical/cerebrospinal-fluid.nii.gz\ + wmparc_subcortical/*th-ventricle.nii.gz\ + mask_csf_1_m.nii.gz -f + scil_image_math.py lower_threshold mask_wm_m.nii.gz 0.1\ - ${prefix}__mask_wm_bin.nii.gz -f + ${prefix}__mask_wm_bin.nii.gz -f scil_image_math.py lower_threshold mask_cortex_m.nii.gz 0.1\ - ${prefix}__mask_gm.nii.gz -f + ${prefix}__mask_gm.nii.gz -f scil_image_math.py lower_threshold mask_nuclei_m.nii.gz 0.1\ - ${prefix}__mask_nuclei_bin.nii.gz -f + ${prefix}__mask_nuclei_bin.nii.gz -f scil_image_math.py lower_threshold mask_csf_1_m.nii.gz 0.1\ - ${prefix}__mask_csf.nii.gz -f + ${prefix}__mask_csf.nii.gz -f scil_image_math.py addition ${prefix}__mask_wm_bin.nii.gz\ ${prefix}__mask_nuclei_bin.nii.gz\ ${prefix}__mask_wm.nii.gz --data_type int16 diff --git a/modules/nf-scil/tracking/localtracking/main.nf b/modules/nf-scil/tracking/localtracking/main.nf index 69cbc0efd..ac4ab1bb4 100644 --- a/modules/nf-scil/tracking/localtracking/main.nf +++ b/modules/nf-scil/tracking/localtracking/main.nf @@ -59,7 +59,7 @@ process TRACKING_LOCALTRACKING { elif [ "${local_seeding_mask}" == "fa" ]; then mrcalc $fa $local_fa_seeding_mask_threshold -ge ${prefix}__local_seeding_mask.nii.gz\ - -datatype uint8 + -datatype uint8 fi scil_compute_local_tracking.py $fodf ${prefix}__local_seeding_mask.nii.gz ${prefix}__local_tracking_mask.nii.gz tmp.trk\ diff --git a/modules/nf-scil/tracking/pfttracking/main.nf b/modules/nf-scil/tracking/pfttracking/main.nf index 3e9eb59e0..563b8354f 100644 
--- a/modules/nf-scil/tracking/pfttracking/main.nf +++ b/modules/nf-scil/tracking/pfttracking/main.nf @@ -108,7 +108,7 @@ process TRACKING_PFTTRACKING { stub: def prefix = task.ext.prefix ?: "${meta.id}" - + """ scil_compute_pft.py -h scil_compute_maps_for_particle_filter_tracking.py -h From 1917a4f18745c64d7537cd792bbc50f973639da8 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Mon, 8 Jan 2024 22:06:08 +0000 Subject: [PATCH 19/41] fix indent for editorconfig --- .editorconfig | 3 + docs/deps/gh-md-toc | 68 ++++++++++---------- modules/nf-scil/tracking/pfttracking/main.nf | 2 +- modules/nf-scil/utils/extractb0/main.nf | 2 +- 4 files changed, 39 insertions(+), 36 deletions(-) diff --git a/.editorconfig b/.editorconfig index 014c2383b..f640980e2 100644 --- a/.editorconfig +++ b/.editorconfig @@ -10,3 +10,6 @@ indent_style = space [*.{md,yml,yaml,html,css,scss,js,cff}] indent_size = 2 + +[LICENSE] +indent_size = 1 diff --git a/docs/deps/gh-md-toc b/docs/deps/gh-md-toc index fbe76eeb9..241eb7ed4 100755 --- a/docs/deps/gh-md-toc +++ b/docs/deps/gh-md-toc @@ -70,9 +70,9 @@ gh_toc_md2html() { local gh_tmp_file_md=$gh_file_md if [ "$skip_header" = "yes" ]; then if grep -Fxq "" "$gh_src"; then - # cut everything before the toc - gh_tmp_file_md=$gh_file_md~~ - sed '1,//d' $gh_file_md > $gh_tmp_file_md + # cut everything before the toc + gh_tmp_file_md=$gh_file_md~~ + sed '1,//d' $gh_file_md > $gh_tmp_file_md fi fi @@ -154,16 +154,16 @@ gh_toc(){ else local rawhtml=$(gh_toc_md2html "$gh_src" "$skip_header") if [ "$rawhtml" == "XXNetworkErrorXX" ]; then - echo "Parsing local markdown file requires access to github API" - echo "Please make sure curl is installed and check your network connectivity" - exit 1 + echo "Parsing local markdown file requires access to github API" + echo "Please make sure curl is installed and check your network connectivity" + exit 1 fi if [ "$rawhtml" == "XXRateLimitXX" ]; then - echo "Parsing local markdown file requires access to github API" - echo "Error: You exceeded the hourly limit. See: https://developer.github.com/v3/#rate-limiting" - TOKEN_FILE="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/token.txt" - echo "or place GitHub auth token here: ${TOKEN_FILE}" - exit 1 + echo "Parsing local markdown file requires access to github API" + echo "Error: You exceeded the hourly limit. 
See: https://developer.github.com/v3/#rate-limiting" + TOKEN_FILE="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/token.txt" + echo "or place GitHub auth token here: ${TOKEN_FILE}" + exit 1 fi local toc=`echo "$rawhtml" | gh_toc_grab "$gh_src_copy" "$indent"` echo "$toc" @@ -221,21 +221,21 @@ gh_toc_grab() { href_regex="/href=\"[^\"]+?\"/" common_awk_script=' - modified_href = "" - split(href, chars, "") - for (i=1;i <= length(href); i++) { - c = chars[i] - res = "" - if (c == "+") { - res = " " - } else { - if (c == "%") { - res = "\\x" - } else { - res = c "" - } - } - modified_href = modified_href res + modified_href = "" + split(href, chars, "") + for (i=1;i <= length(href); i++) { + c = chars[i] + res = "" + if (c == "+") { + res = " " + } else { + if (c == "%") { + res = "\\x" + } else { + res = c "" + } + } + modified_href = modified_href res } print sprintf("%*s", (level-1)*'"$2"', "") "* [" text "](" gh_url modified_href ")" ' @@ -243,19 +243,19 @@ gh_toc_grab() { grepcmd="pcregrep -o" echoargs="" awkscript='{ - level = substr($0, 3, 1) - text = substr($0, match($0, /<\/span><\/a>[^<]*<\/h/)+11, RLENGTH-14) - href = substr($0, match($0, '$href_regex')+6, RLENGTH-7) - '"$common_awk_script"' + level = substr($0, 3, 1) + text = substr($0, match($0, /<\/span><\/a>[^<]*<\/h/)+11, RLENGTH-14) + href = substr($0, match($0, '$href_regex')+6, RLENGTH-7) + '"$common_awk_script"' }' else grepcmd="grep -Eo" echoargs="-e" awkscript='{ - level = substr($0, 3, 1) - text = substr($0, match($0, /<\/span><\/a>.*<\/h/)+11, RLENGTH-14) - href = substr($0, match($0, '$href_regex')+6, RLENGTH-7) - '"$common_awk_script"' + level = substr($0, 3, 1) + text = substr($0, match($0, /<\/span><\/a>.*<\/h/)+11, RLENGTH-14) + href = substr($0, match($0, '$href_regex')+6, RLENGTH-7) + '"$common_awk_script"' }' fi diff --git a/modules/nf-scil/tracking/pfttracking/main.nf b/modules/nf-scil/tracking/pfttracking/main.nf index 563b8354f..a2e44248c 100644 --- a/modules/nf-scil/tracking/pfttracking/main.nf +++ b/modules/nf-scil/tracking/pfttracking/main.nf @@ -66,7 +66,7 @@ process TRACKING_PFTTRACKING { elif [ "${pft_seeding_mask}" == "fa" ]; then mrcalc $fa $pft_fa_threshold -ge ${prefix}__pft_seeding_mask.nii.gz\ - -datatype uint8 + -datatype uint8 fi scil_compute_pft.py $fodf ${prefix}__pft_seeding_mask.nii.gz \ diff --git a/modules/nf-scil/utils/extractb0/main.nf b/modules/nf-scil/utils/extractb0/main.nf index 1e199069a..51a2b44af 100644 --- a/modules/nf-scil/utils/extractb0/main.nf +++ b/modules/nf-scil/utils/extractb0/main.nf @@ -40,7 +40,7 @@ process UTILS_EXTRACTB0 { stub: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - + """ scil_extract_b0.py - h From 2460df5f328eaa0c8585417e12fadb7a54406827 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Mon, 8 Jan 2024 22:09:05 +0000 Subject: [PATCH 20/41] add schema for metadata --- modules/nf-scil/meta-schema.json | 186 +++++++++++++++++++++++++++++++ 1 file changed, 186 insertions(+) create mode 100644 modules/nf-scil/meta-schema.json diff --git a/modules/nf-scil/meta-schema.json b/modules/nf-scil/meta-schema.json new file mode 100644 index 000000000..0b855d76d --- /dev/null +++ b/modules/nf-scil/meta-schema.json @@ -0,0 +1,186 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "title": "Meta yaml", + "description": "Validate the meta yaml file for an nf-core module", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name of the module" + }, + "description": { + "type": "string", + 
"description": "Description of the module" + }, + "keywords": { + "type": "array", + "description": "Keywords for the module", + "items": { + "type": "string" + }, + "minItems": 3 + }, + "authors": { + "type": "array", + "description": "Authors of the module", + "items": { + "type": "string" + } + }, + "maintainers": { + "type": "array", + "description": "Maintainers of the module", + "items": { + "type": "string" + } + }, + "input": { + "type": "array", + "description": "Input channels for the module", + "items": { + "type": "object", + "patternProperties": { + ".*": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Type of the input channel", + "enum": ["map", "file", "directory", "string", "integer", "float", "boolean", "list"] + }, + "description": { + "type": "string", + "description": "Description of the input channel" + }, + "pattern": { + "type": "string", + "description": "Pattern of the input channel, given in Java glob syntax" + }, + "default": { + "type": ["string", "number", "boolean", "array", "object"], + "description": "Default value for the input channel" + }, + "enum": { + "type": "array", + "description": "List of allowed values for the input channel", + "items": { + "type": ["string", "number", "boolean", "array", "object"] + }, + "uniqueItems": true + } + }, + "required": ["type", "description"] + } + } + } + }, + "output": { + "type": "array", + "description": "Output channels for the module", + "items": { + "type": "object", + "patternProperties": { + ".*": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Type of the output channel", + "enum": ["map", "file", "directory", "string", "integer", "float", "boolean", "list"] + }, + "description": { + "type": "string", + "description": "Description of the output channel" + }, + "pattern": { + "type": "string", + "description": "Pattern of the input channel, given in Java glob syntax" + }, + "enum": { + "type": "array", + "description": "List of allowed values for the output channel", + "items": { + "type": ["string", "number", "boolean", "array", "object"] + }, + "uniqueItems": true + } + }, + "required": ["type", "description"] + } + } + } + }, + "tools": { + "type": "array", + "description": "Tools used by the module", + "items": { + "type": "object", + "patternProperties": { + ".*": { + "type": "object", + "properties": { + "description": { + "type": "string", + "description": "Description of the output channel" + }, + "homepage": { + "type": "string", + "description": "Homepage of the tool", + "pattern": "^(http|https)://.*$" + }, + "documentation": { + "type": "string", + "description": "Documentation of the tool", + "pattern": "^(http|https|ftp)://.*$" + }, + "tool_dev_url": { + "type": "string", + "description": "URL of the development version of the tool's documentation", + "pattern": "^(http|https)://.*$" + }, + "doi": { + "description": "DOI of the tool", + "anyOf": [ + { + "type": "string", + "pattern": "^10\\.\\d{4,9}\\/[^,]+$" + }, + { + "type": "string", + "enum": ["no DOI available"] + } + ] + }, + "licence": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Licence of the tool", + "minItems": 1, + "uniqueItems": true, + "message": "Licence must be an array of one or more entries, e.g. 
[\"MIT\"]" + } + }, + "required": ["description"], + "anyOf": [ + { + "required": ["homepage"] + }, + { + "required": ["documentation"] + }, + { + "required": ["tool_dev_url"] + }, + { + "required": ["doi"] + } + ] + } + } + } + } + }, + "required": ["name", "description", "keywords", "authors", "output", "tools"] +} From a6fbb79033d00a0415520595a41970f622bb31fc Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 9 Jan 2024 04:22:02 +0000 Subject: [PATCH 21/41] fix --- modules/nf-scil/meta-schema.json | 186 ------------------------------- 1 file changed, 186 deletions(-) delete mode 100644 modules/nf-scil/meta-schema.json diff --git a/modules/nf-scil/meta-schema.json b/modules/nf-scil/meta-schema.json deleted file mode 100644 index 0b855d76d..000000000 --- a/modules/nf-scil/meta-schema.json +++ /dev/null @@ -1,186 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema", - "title": "Meta yaml", - "description": "Validate the meta yaml file for an nf-core module", - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "Name of the module" - }, - "description": { - "type": "string", - "description": "Description of the module" - }, - "keywords": { - "type": "array", - "description": "Keywords for the module", - "items": { - "type": "string" - }, - "minItems": 3 - }, - "authors": { - "type": "array", - "description": "Authors of the module", - "items": { - "type": "string" - } - }, - "maintainers": { - "type": "array", - "description": "Maintainers of the module", - "items": { - "type": "string" - } - }, - "input": { - "type": "array", - "description": "Input channels for the module", - "items": { - "type": "object", - "patternProperties": { - ".*": { - "type": "object", - "properties": { - "type": { - "type": "string", - "description": "Type of the input channel", - "enum": ["map", "file", "directory", "string", "integer", "float", "boolean", "list"] - }, - "description": { - "type": "string", - "description": "Description of the input channel" - }, - "pattern": { - "type": "string", - "description": "Pattern of the input channel, given in Java glob syntax" - }, - "default": { - "type": ["string", "number", "boolean", "array", "object"], - "description": "Default value for the input channel" - }, - "enum": { - "type": "array", - "description": "List of allowed values for the input channel", - "items": { - "type": ["string", "number", "boolean", "array", "object"] - }, - "uniqueItems": true - } - }, - "required": ["type", "description"] - } - } - } - }, - "output": { - "type": "array", - "description": "Output channels for the module", - "items": { - "type": "object", - "patternProperties": { - ".*": { - "type": "object", - "properties": { - "type": { - "type": "string", - "description": "Type of the output channel", - "enum": ["map", "file", "directory", "string", "integer", "float", "boolean", "list"] - }, - "description": { - "type": "string", - "description": "Description of the output channel" - }, - "pattern": { - "type": "string", - "description": "Pattern of the input channel, given in Java glob syntax" - }, - "enum": { - "type": "array", - "description": "List of allowed values for the output channel", - "items": { - "type": ["string", "number", "boolean", "array", "object"] - }, - "uniqueItems": true - } - }, - "required": ["type", "description"] - } - } - } - }, - "tools": { - "type": "array", - "description": "Tools used by the module", - "items": { - "type": "object", - "patternProperties": { - ".*": { - "type": "object", - "properties": { 
- "description": { - "type": "string", - "description": "Description of the output channel" - }, - "homepage": { - "type": "string", - "description": "Homepage of the tool", - "pattern": "^(http|https)://.*$" - }, - "documentation": { - "type": "string", - "description": "Documentation of the tool", - "pattern": "^(http|https|ftp)://.*$" - }, - "tool_dev_url": { - "type": "string", - "description": "URL of the development version of the tool's documentation", - "pattern": "^(http|https)://.*$" - }, - "doi": { - "description": "DOI of the tool", - "anyOf": [ - { - "type": "string", - "pattern": "^10\\.\\d{4,9}\\/[^,]+$" - }, - { - "type": "string", - "enum": ["no DOI available"] - } - ] - }, - "licence": { - "type": "array", - "items": { - "type": "string" - }, - "description": "Licence of the tool", - "minItems": 1, - "uniqueItems": true, - "message": "Licence must be an array of one or more entries, e.g. [\"MIT\"]" - } - }, - "required": ["description"], - "anyOf": [ - { - "required": ["homepage"] - }, - { - "required": ["documentation"] - }, - { - "required": ["tool_dev_url"] - }, - { - "required": ["doi"] - } - ] - } - } - } - } - }, - "required": ["name", "description", "keywords", "authors", "output", "tools"] -} From 118c1ff6935df2a91ec6f743522f99bfffb3787d Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 9 Jan 2024 04:34:52 +0000 Subject: [PATCH 22/41] fix linting --- modules/nf-scil/reconst/frf/meta.yml | 6 +++--- modules/nf-scil/reconst/meanfrf/meta.yml | 5 ++++- modules/nf-scil/reconst/noddi/meta.yml | 3 ++- modules/nf-scil/tracking/localtracking/meta.yml | 1 + modules/nf-scil/tracking/pfttracking/meta.yml | 5 +++-- 5 files changed, 13 insertions(+), 7 deletions(-) diff --git a/modules/nf-scil/reconst/frf/meta.yml b/modules/nf-scil/reconst/frf/meta.yml index b0210067f..076590487 100644 --- a/modules/nf-scil/reconst/frf/meta.yml +++ b/modules/nf-scil/reconst/frf/meta.yml @@ -3,9 +3,9 @@ name: "reconst_frf" description: Compute a single Fiber Response Function from a DWI. keywords: - - DWI - - FRF - + - Fiber Orientation Distribution Function + - Diffusion MRI + - Fiber Response Function tools: - "DIPY": description: "DIPY is the paragon 3D/4D+ imaging library in Python." diff --git a/modules/nf-scil/reconst/meanfrf/meta.yml b/modules/nf-scil/reconst/meanfrf/meta.yml index a37a30d5c..9d553c8e1 100644 --- a/modules/nf-scil/reconst/meanfrf/meta.yml +++ b/modules/nf-scil/reconst/meanfrf/meta.yml @@ -4,7 +4,10 @@ name: "reconst_meanfrf" description: Compute the mean Fiber Response Function from a set of individually computed Response Functions. keywords: - - FRF + - Fiber Orientation Distribution Function + - Diffusion MRI + - Fiber Response Function + - Average tools: - "Scilpy": diff --git a/modules/nf-scil/reconst/noddi/meta.yml b/modules/nf-scil/reconst/noddi/meta.yml index 105372982..ba05e99aa 100644 --- a/modules/nf-scil/reconst/noddi/meta.yml +++ b/modules/nf-scil/reconst/noddi/meta.yml @@ -3,8 +3,9 @@ name: "reconst_noddi" description: Run NODDI modelling pipeline using AMICO from DWI data. keywords: - - DWI + - Diffusion MRI - NODDI + - Microstructure modeling tools: - "Scilpy": description: "The Sherbrooke Connectivity Imaging Lab (SCIL) Python dMRI processing toolbox." 
diff --git a/modules/nf-scil/tracking/localtracking/meta.yml b/modules/nf-scil/tracking/localtracking/meta.yml index 3fb33a2b8..7d9fbea5a 100644 --- a/modules/nf-scil/tracking/localtracking/meta.yml +++ b/modules/nf-scil/tracking/localtracking/meta.yml @@ -4,6 +4,7 @@ description: process local tracking keywords: - Tractography - Local tracking + - Diffusion MRI tools: - "Scilpy": description: "The Sherbrooke Connectivity Imaging Lab (SCIL) Python dMRI processing toolbox." diff --git a/modules/nf-scil/tracking/pfttracking/meta.yml b/modules/nf-scil/tracking/pfttracking/meta.yml index d2b8c5167..0e15e72e5 100644 --- a/modules/nf-scil/tracking/pfttracking/meta.yml +++ b/modules/nf-scil/tracking/pfttracking/meta.yml @@ -6,8 +6,9 @@ description: Compute include and exclude maps, and the seeding mask Generates a tractogram using anatomically-constrained particle filter tracking, Particle Filtering Tractography (PFT). keywords: - - PFT - - tractography + - Diffusion MRI + - Tractography + - Particle Filtering Tractography tools: - "DIPY": From a25331c756754f5312fe9b2d3e44b502090d24b7 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 9 Jan 2024 04:38:25 +0000 Subject: [PATCH 23/41] fix linting --- modules/nf-scil/reconst/meanfrf/meta.yml | 2 +- modules/nf-scil/reconst/noddi/meta.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/nf-scil/reconst/meanfrf/meta.yml b/modules/nf-scil/reconst/meanfrf/meta.yml index 9d553c8e1..f1d40db9b 100644 --- a/modules/nf-scil/reconst/meanfrf/meta.yml +++ b/modules/nf-scil/reconst/meanfrf/meta.yml @@ -16,7 +16,7 @@ tools: input: - frf_list: - type: Path + type: list description: List of individual Fiber Response Function (FRF) path. pattern: "*.txt" diff --git a/modules/nf-scil/reconst/noddi/meta.yml b/modules/nf-scil/reconst/noddi/meta.yml index ba05e99aa..cb6cae235 100644 --- a/modules/nf-scil/reconst/noddi/meta.yml +++ b/modules/nf-scil/reconst/noddi/meta.yml @@ -46,7 +46,7 @@ input: pattern: "*mask.{nii,nii.gz}" - kernels: - type: folder + type: directory description: Folder containg kernels. pattern: "kernels" @@ -83,7 +83,7 @@ output: pattern: "*__FIT_OD.{nii,nii.gz}" - kernels: - type: folder + type: directory description: Folder containg kernels. 
pattern: "kernels" From ccdee41fd41d582dd20ed9c165deb15cb0fccdab Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 9 Jan 2024 04:49:30 +0000 Subject: [PATCH 24/41] reinstate publishdir --- tests/modules/nf-scil/betcrop/fslbetcrop/nextflow.config | 2 +- .../nf-scil/reconst/diffusivitypriors/nextflow.config | 1 + tests/modules/nf-scil/reconst/dtimetrics/nextflow.config | 7 ++++--- tests/modules/nf-scil/reconst/fodf/nextflow.config | 4 ++-- tests/modules/nf-scil/reconst/frf/nextflow.config | 5 ++--- tests/modules/nf-scil/reconst/meanfrf/nextflow.config | 3 ++- tests/modules/nf-scil/reconst/noddi/nextflow.config | 3 ++- .../nf-scil/segmentation/freesurferseg/nextflow.config | 8 ++------ .../nf-scil/tracking/localtracking/nextflow.config | 5 +++-- .../modules/nf-scil/tracking/pfttracking/nextflow.config | 3 ++- 10 files changed, 21 insertions(+), 20 deletions(-) diff --git a/tests/modules/nf-scil/betcrop/fslbetcrop/nextflow.config b/tests/modules/nf-scil/betcrop/fslbetcrop/nextflow.config index a862d22b7..32f7bc96f 100644 --- a/tests/modules/nf-scil/betcrop/fslbetcrop/nextflow.config +++ b/tests/modules/nf-scil/betcrop/fslbetcrop/nextflow.config @@ -2,7 +2,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - withNAME: "BETCROP_FSLBETCROP" { + withName: "BETCROP_FSLBETCROP" { ext.bet_dwi_f = 0.16 ext.b0_thr = 10 } diff --git a/tests/modules/nf-scil/reconst/diffusivitypriors/nextflow.config b/tests/modules/nf-scil/reconst/diffusivitypriors/nextflow.config index f832d4387..fa8c4a0a7 100644 --- a/tests/modules/nf-scil/reconst/diffusivitypriors/nextflow.config +++ b/tests/modules/nf-scil/reconst/diffusivitypriors/nextflow.config @@ -1,4 +1,5 @@ process { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: "RECONST_DIFFUSIVITYPRIORS" { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } diff --git a/tests/modules/nf-scil/reconst/dtimetrics/nextflow.config b/tests/modules/nf-scil/reconst/dtimetrics/nextflow.config index 9b36be53c..a7f3eaf86 100644 --- a/tests/modules/nf-scil/reconst/dtimetrics/nextflow.config +++ b/tests/modules/nf-scil/reconst/dtimetrics/nextflow.config @@ -1,7 +1,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - + withName: "RECONST_DTIMETRICS" { ext.ad = true ext.evecs = true @@ -18,7 +18,8 @@ process { ext.pulsation = true ext.residual = true } - withName: "test_reconst_dtimetrics_with_b0mask:RECONST_DTIMETRICS" { + + withName: "test_reconst_dtimetrics_with_b0mask:RECONST_DTIMETRICS" { ext.ad = false ext.evecs = false ext.evals = false @@ -34,4 +35,4 @@ process { ext.pulsation = false ext.residual = false } -} \ No newline at end of file +} diff --git a/tests/modules/nf-scil/reconst/fodf/nextflow.config b/tests/modules/nf-scil/reconst/fodf/nextflow.config index 847409318..c4de284c7 100644 --- a/tests/modules/nf-scil/reconst/fodf/nextflow.config +++ b/tests/modules/nf-scil/reconst/fodf/nextflow.config @@ -1,4 +1,5 @@ process { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: "RECONST_FODF" { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } @@ -28,5 +29,4 @@ process { ext.afd_sum = false ext.nufo = false } - -} \ No newline at end of file +} diff --git a/tests/modules/nf-scil/reconst/frf/nextflow.config 
b/tests/modules/nf-scil/reconst/frf/nextflow.config index bf1c9999b..6265affd9 100644 --- a/tests/modules/nf-scil/reconst/frf/nextflow.config +++ b/tests/modules/nf-scil/reconst/frf/nextflow.config @@ -1,7 +1,6 @@ process { - publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - + withName: "RECONST_FRF" { ext.fa = 0.7 ext.fa_min = 0.5 @@ -27,4 +26,4 @@ process { ext.set_frf = true ext.manual_frf = "15,4,4" } -} \ No newline at end of file +} diff --git a/tests/modules/nf-scil/reconst/meanfrf/nextflow.config b/tests/modules/nf-scil/reconst/meanfrf/nextflow.config index 662808e43..398430ccd 100644 --- a/tests/modules/nf-scil/reconst/meanfrf/nextflow.config +++ b/tests/modules/nf-scil/reconst/meanfrf/nextflow.config @@ -1,6 +1,7 @@ process { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: "RECONST_MEANFRF" { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } } -} \ No newline at end of file +} diff --git a/tests/modules/nf-scil/reconst/noddi/nextflow.config b/tests/modules/nf-scil/reconst/noddi/nextflow.config index b44a2a9b5..0b09884d9 100644 --- a/tests/modules/nf-scil/reconst/noddi/nextflow.config +++ b/tests/modules/nf-scil/reconst/noddi/nextflow.config @@ -1,4 +1,5 @@ process { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: "RECONST_NODDI" { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } @@ -9,4 +10,4 @@ process { ext.b_thr = 40 ext.nb_threads = 1 } -} \ No newline at end of file +} diff --git a/tests/modules/nf-scil/segmentation/freesurferseg/nextflow.config b/tests/modules/nf-scil/segmentation/freesurferseg/nextflow.config index 200d377f1..0293c16f9 100644 --- a/tests/modules/nf-scil/segmentation/freesurferseg/nextflow.config +++ b/tests/modules/nf-scil/segmentation/freesurferseg/nextflow.config @@ -1,7 +1,3 @@ process { - - withName: "SEGMENTATION_FREESURFERSEG" { - publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - } - -} \ No newline at end of file + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } +} diff --git a/tests/modules/nf-scil/tracking/localtracking/nextflow.config b/tests/modules/nf-scil/tracking/localtracking/nextflow.config index 122004637..868467212 100644 --- a/tests/modules/nf-scil/tracking/localtracking/nextflow.config +++ b/tests/modules/nf-scil/tracking/localtracking/nextflow.config @@ -1,4 +1,5 @@ process { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: "WM_TRACKING" { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } @@ -38,5 +39,5 @@ process { ext.local_max_len = 200 ext.basis = "descoteaux07" } - -} \ No newline at end of file + +} diff --git a/tests/modules/nf-scil/tracking/pfttracking/nextflow.config b/tests/modules/nf-scil/tracking/pfttracking/nextflow.config index 180db7b97..9f21b4e01 100644 --- a/tests/modules/nf-scil/tracking/pfttracking/nextflow.config +++ b/tests/modules/nf-scil/tracking/pfttracking/nextflow.config @@ -1,4 +1,5 @@ process { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: "WM_TRACKING" { publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } @@ -63,4 +64,4 @@ process { ext.pft_front = 1 ext.basis = "descoteaux07" } -} \ No newline at end of file +} From 777e7fa1c3c27ef6cd7ad024f78458eb2cb9330d Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 9 Jan 2024 04:52:36 +0000 Subject: [PATCH 25/41] fix linting --- modules/nf-scil/reconst/diffusivitypriors/meta.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/nf-scil/reconst/diffusivitypriors/meta.yml b/modules/nf-scil/reconst/diffusivitypriors/meta.yml index 4f6f41e8c..cc3e8b596 100644 --- a/modules/nf-scil/reconst/diffusivitypriors/meta.yml +++ b/modules/nf-scil/reconst/diffusivitypriors/meta.yml @@ -34,7 +34,7 @@ input: pattern: "*.{nii,nii.gz}" - priors: - type: folder + type: directory description: Folder containg text files for iso and para diffusivity values. pattern: "priors" @@ -66,7 +66,7 @@ output: pattern: "*.txt" - priors: - type: folder + type: directory description: Folder containg text files for iso and para diffusivity values. pattern: "priors" From d0e779156257376212b52e0fe580d31419b51972 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 9 Jan 2024 05:14:47 +0000 Subject: [PATCH 26/41] add protocol to sif images --- docs/MODULE.md | 2 +- modules/nf-scil/preproc/gibbs/main.nf | 2 +- modules/nf-scil/preproc/n4/main.nf | 2 +- modules/nf-scil/testdata/scilpy/main.nf | 4 ++-- modules/nf-scil/tracking/localtracking/main.nf | 2 +- tests/modules/nf-scil/denoising/mppca/test.yml | 2 -- 6 files changed, 6 insertions(+), 8 deletions(-) diff --git a/docs/MODULE.md b/docs/MODULE.md index 7f0976da3..2f4240e95 100644 --- a/docs/MODULE.md +++ b/docs/MODULE.md @@ -49,7 +49,7 @@ already follow all guidelines. You will find related files in : - If the process uses the `scilus` container, use the following replacements, else remove the whole section. - `depot.galaxyproject.org...` ⟹ `scil.usherbrooke.ca/containers/scilus_1.6.0.sif` + `depot.galaxyproject.org...` ⟹ `https://scil.usherbrooke.ca/containers/scilus_1.6.0.sif` `biocontainers/YOUR-TOOL-HERE` ⟹ `scilus/scilus:1.6.0` diff --git a/modules/nf-scil/preproc/gibbs/main.nf b/modules/nf-scil/preproc/gibbs/main.nf index 4008cf4b0..849f9f429 100644 --- a/modules/nf-scil/preproc/gibbs/main.nf +++ b/modules/nf-scil/preproc/gibbs/main.nf @@ -3,7 +3,7 @@ process PREPROC_GIBBS { label 'process_single' container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'scil.usherbrooke.ca/containers/scilus_1.6.0.sif': + 'https://scil.usherbrooke.ca/containers/scilus_1.6.0.sif': 'scilus/scilus:1.6.0' }" input: diff --git a/modules/nf-scil/preproc/n4/main.nf b/modules/nf-scil/preproc/n4/main.nf index 9bc435953..e568d58cd 100644 --- a/modules/nf-scil/preproc/n4/main.nf +++ b/modules/nf-scil/preproc/n4/main.nf @@ -3,7 +3,7 @@ process PREPROC_N4 { label 'process_single' container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'scil.usherbrooke.ca/containers/scilus_1.6.0.sif': + 'https://scil.usherbrooke.ca/containers/scilus_1.6.0.sif': 'scilus/scilus:1.6.0' }" input: diff --git a/modules/nf-scil/testdata/scilpy/main.nf b/modules/nf-scil/testdata/scilpy/main.nf index ea0a78096..880d1589d 100644 --- a/modules/nf-scil/testdata/scilpy/main.nf +++ b/modules/nf-scil/testdata/scilpy/main.nf @@ -4,7 +4,7 @@ process TESTDATA_SCILPY { label 'process_single' container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'scil.usherbrooke.ca/containers/scilus_1.6.0.sif': + 'https://scil.usherbrooke.ca/containers/scilus_1.6.0.sif': 'scilus/scilus:1.6.0' }" input: @@ -44,4 +44,4 @@ process TESTDATA_SCILPY { scilpy: 1.6.0 END_VERSIONS """ -} \ No newline at end of file +} diff --git a/modules/nf-scil/tracking/localtracking/main.nf b/modules/nf-scil/tracking/localtracking/main.nf index ac4ab1bb4..2bebb988f 100644 --- a/modules/nf-scil/tracking/localtracking/main.nf +++ b/modules/nf-scil/tracking/localtracking/main.nf @@ -3,7 +3,7 @@ process TRACKING_LOCALTRACKING { label 'process_single' container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'scil.usherbrooke.ca/containers/scilus_1.6.0.sif': + 'https://scil.usherbrooke.ca/containers/scilus_1.6.0.sif': 'scilus/scilus:1.6.0' }" input: diff --git a/tests/modules/nf-scil/denoising/mppca/test.yml b/tests/modules/nf-scil/denoising/mppca/test.yml index e9b313aa8..b0a4918fa 100644 --- a/tests/modules/nf-scil/denoising/mppca/test.yml +++ b/tests/modules/nf-scil/denoising/mppca/test.yml @@ -4,8 +4,6 @@ - denoising/mppca - denoising files: - - path: output/denoising/test__pre_dwi_denoised.nii.gz - md5sum: ece4b6fe910ae6502edd52b526609638 - path: output/denoising/test_dwi_denoised.nii.gz md5sum: 0cfa480f702173a047f076bbba9fe25e - path: output/denoising/versions.yml From 0acf7af48ad6943e02d5a24a6d26df8c8e6d8bca Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 9 Jan 2024 05:26:22 +0000 Subject: [PATCH 27/41] priors is a directory, no matter what --- modules/nf-scil/reconst/diffusivitypriors/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/nf-scil/reconst/diffusivitypriors/main.nf b/modules/nf-scil/reconst/diffusivitypriors/main.nf index c71a3cba5..a6d440c42 100644 --- a/modules/nf-scil/reconst/diffusivitypriors/main.nf +++ b/modules/nf-scil/reconst/diffusivitypriors/main.nf @@ -31,7 +31,7 @@ process RECONST_DIFFUSIVITYPRIORS { def roi_radius = task.ext.roi_radius ? "--roi_radius " + task.ext.roi_radius : "" """ - if [ ! 
-z "$priors" ] + if [ -d "$priors" ] then cat $priors/*__para_diff.txt > all_para_diff.txt awk '{ total += \$1; count++ } END { print total/count }' all_para_diff.txt > mean_para_diff.txt From 5f1c3008b4b234fd883e5c18af876fe4540912fa Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Wed, 10 Jan 2024 22:00:17 +0000 Subject: [PATCH 28/41] bind /tmp to volume to prevent out-of-space --- .devcontainer/devcontainer.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 0f627bb5e..fab32d98e 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -59,5 +59,6 @@ } }, "init": true, - "privileged": true + "privileged": true, + "runArgs": ["--mount", "type=volume,target=/tmp/"] } From 450d86124fd73e14d03805212ae9f76f7c2d4b52 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 00:59:07 +0000 Subject: [PATCH 29/41] fix noddi, n4, fslbetcrop --- modules/nf-scil/reconst/noddi/main.nf | 2 +- .../nf-scil/betcrop/fslbetcrop/test.yml | 2 +- .../nf-scil/preproc/n4/nextflow.config | 8 +- tests/modules/nf-scil/preproc/n4/test.yml | 2 +- tests/modules/nf-scil/reconst/noddi/main.nf | 10 +- .../nf-scil/reconst/noddi/nextflow.config | 1 - tests/modules/nf-scil/reconst/noddi/test.yml | 584 +++++++++--------- 7 files changed, 306 insertions(+), 303 deletions(-) diff --git a/modules/nf-scil/reconst/noddi/main.nf b/modules/nf-scil/reconst/noddi/main.nf index f7fe963b8..66ec20fa9 100644 --- a/modules/nf-scil/reconst/noddi/main.nf +++ b/modules/nf-scil/reconst/noddi/main.nf @@ -39,7 +39,7 @@ process RECONST_NODDI { $lambda2 $nb_threads $b_thr $set_mask $set_kernels - if [ ! -z "$kernels" ]; then + if [ -d "$kernels" ]; then mv results/FIT_dir.nii.gz ${prefix}__FIT_dir.nii.gz mv results/FIT_ICVF.nii.gz ${prefix}__FIT_ICVF.nii.gz mv results/FIT_ISOVF.nii.gz ${prefix}__FIT_ISOVF.nii.gz diff --git a/tests/modules/nf-scil/betcrop/fslbetcrop/test.yml b/tests/modules/nf-scil/betcrop/fslbetcrop/test.yml index 4ad11b8cf..ada9e7cb5 100644 --- a/tests/modules/nf-scil/betcrop/fslbetcrop/test.yml +++ b/tests/modules/nf-scil/betcrop/fslbetcrop/test.yml @@ -7,7 +7,7 @@ - path: output/betcrop/test__dwi_bet_cropped.nii.gz md5sum: e597e3030800fbf9de681c1a5e539c9d - path: output/betcrop/test__dwi_bet_cropped_mask.nii.gz - md5sum: 2fc5bd8704c86fe94b79c1d651e94032 + md5sum: d511acb62d15958b5a8f2116cdb3ce3a - path: output/betcrop/test__dwi_boundingBox.pkl md5sum: 89012d5b4f0f1bee03658b193911e0a1 - path: output/betcrop/versions.yml diff --git a/tests/modules/nf-scil/preproc/n4/nextflow.config b/tests/modules/nf-scil/preproc/n4/nextflow.config index 50f50a7a3..0ec7cb244 100644 --- a/tests/modules/nf-scil/preproc/n4/nextflow.config +++ b/tests/modules/nf-scil/preproc/n4/nextflow.config @@ -1,5 +1,9 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - -} \ No newline at end of file + + withName: "PREPROC_N4" { + ext.bspline_knot_per_voxel = 0.25 + ext.shrink_factor = 4 + } +} diff --git a/tests/modules/nf-scil/preproc/n4/test.yml b/tests/modules/nf-scil/preproc/n4/test.yml index 408e7237b..1cc5d9159 100644 --- a/tests/modules/nf-scil/preproc/n4/test.yml +++ b/tests/modules/nf-scil/preproc/n4/test.yml @@ -5,5 +5,5 @@ - preproc/n4 files: - path: output/preproc/test__dwi_n4.nii.gz - md5sum: d7a741dcf1a1f9176619790e9d708940 + md5sum: 1068c745054b73e4cd836100d34c54a7 - path: output/preproc/versions.yml diff --git a/tests/modules/nf-scil/reconst/noddi/main.nf 
b/tests/modules/nf-scil/reconst/noddi/main.nf index 8d9a6d086..dc573cbc7 100644 --- a/tests/modules/nf-scil/reconst/noddi/main.nf +++ b/tests/modules/nf-scil/reconst/noddi/main.nf @@ -4,9 +4,9 @@ nextflow.enable.dsl = 2 include { LOAD_TEST_DATA } from '../../../../../subworkflows/nf-scil/load_test_data/main.nf' -include { - RECONST_NODDI as COMPUTE_KERNELS; - RECONST_NODDI as COMPUTE_METRICS} from '../../../../../modules/nf-scil/reconst/noddi/main.nf' +include { + RECONST_NODDI as RECONST_NODDI_KERNELS; + RECONST_NODDI as RECONST_NODDI_METRICS} from '../../../../../modules/nf-scil/reconst/noddi/main.nf' workflow test_reconst_noddi_save_kernels { @@ -24,7 +24,7 @@ workflow test_reconst_noddi_save_kernels { [] ]} - COMPUTE_KERNELS ( input_noddi ) + RECONST_NODDI_KERNELS ( input_noddi ) } workflow test_reconst_noddi_save_kernels_no_mask { @@ -42,5 +42,5 @@ workflow test_reconst_noddi_save_kernels_no_mask { [], [] ]} - COMPUTE_KERNELS ( input_noddi ) + RECONST_NODDI_KERNELS ( input_noddi ) } diff --git a/tests/modules/nf-scil/reconst/noddi/nextflow.config b/tests/modules/nf-scil/reconst/noddi/nextflow.config index 0b09884d9..e18972fe9 100644 --- a/tests/modules/nf-scil/reconst/noddi/nextflow.config +++ b/tests/modules/nf-scil/reconst/noddi/nextflow.config @@ -2,7 +2,6 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: "RECONST_NODDI" { - publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } ext.para_diff = 1.7e-3 ext.iso_diff = 3e-3 ext.lambda1 = 0.5 diff --git a/tests/modules/nf-scil/reconst/noddi/test.yml b/tests/modules/nf-scil/reconst/noddi/test.yml index 6e9338c54..e5e336e7c 100644 --- a/tests/modules/nf-scil/reconst/noddi/test.yml +++ b/tests/modules/nf-scil/reconst/noddi/test.yml @@ -4,297 +4,297 @@ - reconst/noddi - reconst files: - - path: output/compute/kernels/A_001.npy + - path: output/reconst/kernels/A_001.npy md5sum: 7685778d192b6e900af06af03de1c40a - - path: output/compute/kernels/A_002.npy + - path: output/reconst/kernels/A_002.npy md5sum: dcc12f440bef6378ff60c0819dee2908 - - path: output/compute/kernels/A_003.npy + - path: output/reconst/kernels/A_003.npy md5sum: ed1167026a33c054d763281e1557a4e3 - - path: output/compute/kernels/A_004.npy + - path: output/reconst/kernels/A_004.npy md5sum: 54c0d829a6aa1e4e64494557cc5eb9e6 - - path: output/compute/kernels/A_005.npy + - path: output/reconst/kernels/A_005.npy md5sum: 3a8b3795e38d8b3bd0e4bcb7e68fd1b1 - - path: output/compute/kernels/A_006.npy + - path: output/reconst/kernels/A_006.npy md5sum: a4d016455774b4fa0e2fed7d332b20da - - path: output/compute/kernels/A_007.npy + - path: output/reconst/kernels/A_007.npy md5sum: 4c0e816675bdad94211a915f68378af1 - - path: output/compute/kernels/A_008.npy + - path: output/reconst/kernels/A_008.npy md5sum: 7cf5b62efc28be3e853ef51851a49790 - - path: output/compute/kernels/A_009.npy + - path: output/reconst/kernels/A_009.npy md5sum: bd46f9cfebc7c819a890c367ab6cb1da - - path: output/compute/kernels/A_010.npy + - path: output/reconst/kernels/A_010.npy md5sum: 609c7b5227168255d8cb7f1bee7333c2 - - path: output/compute/kernels/A_011.npy + - path: output/reconst/kernels/A_011.npy md5sum: 63bb74c9f92484bf928468994efb8f78 - - path: output/compute/kernels/A_012.npy + - path: output/reconst/kernels/A_012.npy md5sum: 9a5a92dabd50ab72bf03442a61656499 - - path: output/compute/kernels/A_013.npy + - path: output/reconst/kernels/A_013.npy md5sum: 0039dedb0f99de263117075648c1ba0c - - path: 
output/compute/kernels/A_014.npy + - path: output/reconst/kernels/A_014.npy md5sum: 8ef7c0316b06ab56b1bb239124bce91b - - path: output/compute/kernels/A_015.npy + - path: output/reconst/kernels/A_015.npy md5sum: 3b1f91a5ba7c52a1d9c7d4c5d7c170e1 - - path: output/compute/kernels/A_016.npy + - path: output/reconst/kernels/A_016.npy md5sum: 5d1b88ffe98c426eeb761b0c2ea17e52 - - path: output/compute/kernels/A_017.npy + - path: output/reconst/kernels/A_017.npy md5sum: abd1b83fa7d8ad6d5f248c11f9056ba3 - - path: output/compute/kernels/A_018.npy + - path: output/reconst/kernels/A_018.npy md5sum: d251399b3c9cd9204a4870504da4d281 - - path: output/compute/kernels/A_019.npy + - path: output/reconst/kernels/A_019.npy md5sum: 849ab4311ec7b01b684fc2b40c1b2778 - - path: output/compute/kernels/A_020.npy + - path: output/reconst/kernels/A_020.npy md5sum: 3f4c04129b1e7985bf0771ddfba8c015 - - path: output/compute/kernels/A_021.npy + - path: output/reconst/kernels/A_021.npy md5sum: 1994666f9141d28b907b8718e7e6f777 - - path: output/compute/kernels/A_022.npy + - path: output/reconst/kernels/A_022.npy md5sum: 4aaa5e9a86fb8fb01da26e41e2b06b1c - - path: output/compute/kernels/A_023.npy + - path: output/reconst/kernels/A_023.npy md5sum: 17e198034e513baeac9ff73ea15ee25a - - path: output/compute/kernels/A_024.npy + - path: output/reconst/kernels/A_024.npy md5sum: 3490a078a05053501c4aeac7f0b4f552 - - path: output/compute/kernels/A_025.npy + - path: output/reconst/kernels/A_025.npy md5sum: 4b5e35e1fb093421c673fba6d6c9b6df - - path: output/compute/kernels/A_026.npy + - path: output/reconst/kernels/A_026.npy md5sum: 9f50a6cf0ae4fece488696e057a275d6 - - path: output/compute/kernels/A_027.npy + - path: output/reconst/kernels/A_027.npy md5sum: fa1be584c09c3618734c96d702df53a8 - - path: output/compute/kernels/A_028.npy + - path: output/reconst/kernels/A_028.npy md5sum: 48dd2b0ed0cd5eb36602b5318e8e7a3a - - path: output/compute/kernels/A_029.npy + - path: output/reconst/kernels/A_029.npy md5sum: b3eb55ae0f277a19dc8f1d84a8dd895a - - path: output/compute/kernels/A_030.npy + - path: output/reconst/kernels/A_030.npy md5sum: d57c41283368a9b66b487f9db2d05fc2 - - path: output/compute/kernels/A_031.npy + - path: output/reconst/kernels/A_031.npy md5sum: 3cf5adb3ad92b1d67993722d9eab380b - - path: output/compute/kernels/A_032.npy + - path: output/reconst/kernels/A_032.npy md5sum: 779aa382425bedf4b966ed43a2609801 - - path: output/compute/kernels/A_033.npy + - path: output/reconst/kernels/A_033.npy md5sum: 4d4248fb123b63af721b3e097ff9903f - - path: output/compute/kernels/A_034.npy + - path: output/reconst/kernels/A_034.npy md5sum: a0f02c3980cd5a125f262d7e68a9a23c - - path: output/compute/kernels/A_035.npy + - path: output/reconst/kernels/A_035.npy md5sum: 8d919c8600bbb6f586b6c334fec84f9f - - path: output/compute/kernels/A_036.npy + - path: output/reconst/kernels/A_036.npy md5sum: d6edbee74e08a8c20b015ed4ae18b2ab - - path: output/compute/kernels/A_037.npy + - path: output/reconst/kernels/A_037.npy md5sum: f6f6d482a7ca42369f4c62e6f8eef552 - - path: output/compute/kernels/A_038.npy + - path: output/reconst/kernels/A_038.npy md5sum: 9dff1ac269b381a75f5c25bf051bf1c5 - - path: output/compute/kernels/A_039.npy + - path: output/reconst/kernels/A_039.npy md5sum: 8892bea5472f1b40c5868a536294f134 - - path: output/compute/kernels/A_040.npy + - path: output/reconst/kernels/A_040.npy md5sum: 99adce8e31e9288ea1550b8029f2bae0 - - path: output/compute/kernels/A_041.npy + - path: output/reconst/kernels/A_041.npy md5sum: 1b3fecb234dfc9e4554ac7df11017595 - - 
path: output/compute/kernels/A_042.npy + - path: output/reconst/kernels/A_042.npy md5sum: 1bc1a2afe6f03b031d7667566a084cb1 - - path: output/compute/kernels/A_043.npy + - path: output/reconst/kernels/A_043.npy md5sum: 3743e4a06a8c52b21e46d20bbb68c89c - - path: output/compute/kernels/A_044.npy + - path: output/reconst/kernels/A_044.npy md5sum: 022bc4586ed94d71e03096de6fdf2443 - - path: output/compute/kernels/A_045.npy + - path: output/reconst/kernels/A_045.npy md5sum: cd15743a7139f981fa65905ebb4c669f - - path: output/compute/kernels/A_046.npy + - path: output/reconst/kernels/A_046.npy md5sum: 89d46a8684dcbed0b84892f536745aeb - - path: output/compute/kernels/A_047.npy + - path: output/reconst/kernels/A_047.npy md5sum: 607d4cf2568437a32e130640033b2eb6 - - path: output/compute/kernels/A_048.npy + - path: output/reconst/kernels/A_048.npy md5sum: 080089c3ee74f6a670ba2f4629739523 - - path: output/compute/kernels/A_049.npy + - path: output/reconst/kernels/A_049.npy md5sum: ab43ede1e8e72e93985dbafe52fcae41 - - path: output/compute/kernels/A_050.npy + - path: output/reconst/kernels/A_050.npy md5sum: 89e79074000af7660861fe8596abf2dc - - path: output/compute/kernels/A_051.npy + - path: output/reconst/kernels/A_051.npy md5sum: dd8a8961ff769c358961b589c9fd605c - - path: output/compute/kernels/A_052.npy + - path: output/reconst/kernels/A_052.npy md5sum: c5f520b60797e0dc88aefaa705442a82 - - path: output/compute/kernels/A_053.npy + - path: output/reconst/kernels/A_053.npy md5sum: 7355cc61e1edf1576f8d695285b56d88 - - path: output/compute/kernels/A_054.npy + - path: output/reconst/kernels/A_054.npy md5sum: dc5d18d1f95b9b1251c6f1007231dd41 - - path: output/compute/kernels/A_055.npy + - path: output/reconst/kernels/A_055.npy md5sum: 51815fd6d8717d7bf729bbd8f0215bbf - - path: output/compute/kernels/A_056.npy + - path: output/reconst/kernels/A_056.npy md5sum: ede3f55419c4e8a41de56ea2fa357c2f - - path: output/compute/kernels/A_057.npy + - path: output/reconst/kernels/A_057.npy md5sum: 0be644cc5e4e962f38d7a60d6894cf08 - - path: output/compute/kernels/A_058.npy + - path: output/reconst/kernels/A_058.npy md5sum: a60bfda5b16ceb96f6e3a4a26ff8913f - - path: output/compute/kernels/A_059.npy + - path: output/reconst/kernels/A_059.npy md5sum: 71b34384955060e2cb2cb4cf3329842b - - path: output/compute/kernels/A_060.npy + - path: output/reconst/kernels/A_060.npy md5sum: 90a2a4f915947c7d2ee99969ba394f16 - - path: output/compute/kernels/A_061.npy + - path: output/reconst/kernels/A_061.npy md5sum: 29127b6428928197cd999fba5cc15962 - - path: output/compute/kernels/A_062.npy + - path: output/reconst/kernels/A_062.npy md5sum: c4cb3322d1f57543f3a4c24bdd6542d0 - - path: output/compute/kernels/A_063.npy + - path: output/reconst/kernels/A_063.npy md5sum: f2a434d9087486822d9467aa9a2949c6 - - path: output/compute/kernels/A_064.npy + - path: output/reconst/kernels/A_064.npy md5sum: 14629aaf9c0381e548ffa2859f02c7f2 - - path: output/compute/kernels/A_065.npy + - path: output/reconst/kernels/A_065.npy md5sum: 82964736555dd0e33d107b8d1ba04b99 - - path: output/compute/kernels/A_066.npy + - path: output/reconst/kernels/A_066.npy md5sum: aa282cb662a2581296f8c6432bec5def - - path: output/compute/kernels/A_067.npy + - path: output/reconst/kernels/A_067.npy md5sum: a8273af0f3f0f78ec36a9256b8e8838b - - path: output/compute/kernels/A_068.npy + - path: output/reconst/kernels/A_068.npy md5sum: 0243467ee9f75aafda13e586d3292236 - - path: output/compute/kernels/A_069.npy + - path: output/reconst/kernels/A_069.npy md5sum: 70b6e1cf69afa7861cdc71209bff19e3 - 
- path: output/compute/kernels/A_070.npy + - path: output/reconst/kernels/A_070.npy md5sum: ee159bf58499762488db9a3e80038e09 - - path: output/compute/kernels/A_071.npy + - path: output/reconst/kernels/A_071.npy md5sum: c255bfec1bebe547bce036b3d276b158 - - path: output/compute/kernels/A_072.npy + - path: output/reconst/kernels/A_072.npy md5sum: 5a7785fdd6906e33bbad4a4610e39dc0 - - path: output/compute/kernels/A_073.npy + - path: output/reconst/kernels/A_073.npy md5sum: 83d41cc79f305d9a097a91f18250319d - - path: output/compute/kernels/A_074.npy + - path: output/reconst/kernels/A_074.npy md5sum: 4b30b4210b3e5c4e026a58892308a4e3 - - path: output/compute/kernels/A_075.npy + - path: output/reconst/kernels/A_075.npy md5sum: f6f83ea9451b3f96a7bbe015cc9c8c21 - - path: output/compute/kernels/A_076.npy + - path: output/reconst/kernels/A_076.npy md5sum: 40e72cd83f75039825e15d9ce4ed05e4 - - path: output/compute/kernels/A_077.npy + - path: output/reconst/kernels/A_077.npy md5sum: c961a829523e020ef5e0ebf1a733c19b - - path: output/compute/kernels/A_078.npy + - path: output/reconst/kernels/A_078.npy md5sum: c24066930825c7756342742df6331c82 - - path: output/compute/kernels/A_079.npy + - path: output/reconst/kernels/A_079.npy md5sum: 07ddc1e8a148442e83e6aca66e13acd5 - - path: output/compute/kernels/A_080.npy + - path: output/reconst/kernels/A_080.npy md5sum: 709ac9b7c8301153f0317b96cda75552 - - path: output/compute/kernels/A_081.npy + - path: output/reconst/kernels/A_081.npy md5sum: 8bede120aa60737b83b61d8d0aac582a - - path: output/compute/kernels/A_082.npy + - path: output/reconst/kernels/A_082.npy md5sum: 3945f973add1a6db95fe58e329ec91e7 - - path: output/compute/kernels/A_083.npy + - path: output/reconst/kernels/A_083.npy md5sum: 4c201bac152c7e9130830d8915886683 - - path: output/compute/kernels/A_084.npy + - path: output/reconst/kernels/A_084.npy md5sum: 233696d75a8e674a0820b0f4123b621b - - path: output/compute/kernels/A_085.npy + - path: output/reconst/kernels/A_085.npy md5sum: 783ed22541c2470a64236cfb9745a787 - - path: output/compute/kernels/A_086.npy + - path: output/reconst/kernels/A_086.npy md5sum: c4df82fe0257e943bff9bbfe97c8bd01 - - path: output/compute/kernels/A_087.npy + - path: output/reconst/kernels/A_087.npy md5sum: 69ffd05ba012ae164e882e2f0720492b - - path: output/compute/kernels/A_088.npy + - path: output/reconst/kernels/A_088.npy md5sum: 0b41b02520099e6945a34fdbcc5ee7a8 - - path: output/compute/kernels/A_089.npy + - path: output/reconst/kernels/A_089.npy md5sum: 0eea673e2d6ecfa072bc8e72c878cdcd - - path: output/compute/kernels/A_090.npy + - path: output/reconst/kernels/A_090.npy md5sum: be79cf920ab80d7f6ebd04ec878eb6f6 - - path: output/compute/kernels/A_091.npy + - path: output/reconst/kernels/A_091.npy md5sum: a33b58f20a68bdc59f262c714c0f5616 - - path: output/compute/kernels/A_092.npy + - path: output/reconst/kernels/A_092.npy md5sum: d0f3e9e8b8ce77c4c91a95f6bf91dadf - - path: output/compute/kernels/A_093.npy + - path: output/reconst/kernels/A_093.npy md5sum: 8af839b27f6b1b3e0c712c73b9ca95d5 - - path: output/compute/kernels/A_094.npy + - path: output/reconst/kernels/A_094.npy md5sum: 33045a5adb24c5a4c04e28f1495e4af2 - - path: output/compute/kernels/A_095.npy + - path: output/reconst/kernels/A_095.npy md5sum: 747034be17ce34ac1837a7ef4a785675 - - path: output/compute/kernels/A_096.npy + - path: output/reconst/kernels/A_096.npy md5sum: 93d719b74fb1fb304784aece5b6c5ab4 - - path: output/compute/kernels/A_097.npy + - path: output/reconst/kernels/A_097.npy md5sum: 013fa7ae6870925e17fb73ee78809502 
- - path: output/compute/kernels/A_098.npy + - path: output/reconst/kernels/A_098.npy md5sum: 096edd9c291d4f9d85152b50e86a3112 - - path: output/compute/kernels/A_099.npy + - path: output/reconst/kernels/A_099.npy md5sum: 08b65ea77d226c1f7d5876cee415ce4a - - path: output/compute/kernels/A_100.npy + - path: output/reconst/kernels/A_100.npy md5sum: dd38ec6122813be0450bc9a89401b040 - - path: output/compute/kernels/A_101.npy + - path: output/reconst/kernels/A_101.npy md5sum: 013c4e58cfe8deda708575a2bf51ec43 - - path: output/compute/kernels/A_102.npy + - path: output/reconst/kernels/A_102.npy md5sum: 92f56a865fda5830512f30b158472357 - - path: output/compute/kernels/A_103.npy + - path: output/reconst/kernels/A_103.npy md5sum: 6cc2c4cb5259aa5a08eb87ae78eabd2f - - path: output/compute/kernels/A_104.npy + - path: output/reconst/kernels/A_104.npy md5sum: c286597b7b9101f9fbde1a5a89c9aa57 - - path: output/compute/kernels/A_105.npy + - path: output/reconst/kernels/A_105.npy md5sum: a193d8436a95481f3dcd9a2fb22bf8c1 - - path: output/compute/kernels/A_106.npy + - path: output/reconst/kernels/A_106.npy md5sum: 8e7e770817081413839881b31af40684 - - path: output/compute/kernels/A_107.npy + - path: output/reconst/kernels/A_107.npy md5sum: 8d5987397d5f7fc4e13c4bd94a9ed0ff - - path: output/compute/kernels/A_108.npy + - path: output/reconst/kernels/A_108.npy md5sum: 83ee0ee52c845957cf5ce16cb343fb57 - - path: output/compute/kernels/A_109.npy + - path: output/reconst/kernels/A_109.npy md5sum: 6c25ca897da95f16e92384024a9ff6f8 - - path: output/compute/kernels/A_110.npy + - path: output/reconst/kernels/A_110.npy md5sum: dd40c52181be77afeaa77bdc611fb6d4 - - path: output/compute/kernels/A_111.npy + - path: output/reconst/kernels/A_111.npy md5sum: 8ce666c5c5c954c051a0344af7405571 - - path: output/compute/kernels/A_112.npy + - path: output/reconst/kernels/A_112.npy md5sum: 0f05bf2d7694ca3572ba495fa46ee81f - - path: output/compute/kernels/A_113.npy + - path: output/reconst/kernels/A_113.npy md5sum: ff3fae915eceb71c3d550245f4467752 - - path: output/compute/kernels/A_114.npy + - path: output/reconst/kernels/A_114.npy md5sum: b82049618788b0450e2b43c5f6d8119d - - path: output/compute/kernels/A_115.npy + - path: output/reconst/kernels/A_115.npy md5sum: ac9a6075479974c1fecd9352550456b6 - - path: output/compute/kernels/A_116.npy + - path: output/reconst/kernels/A_116.npy md5sum: 09482147106b80d7bd6fda72bf8baf86 - - path: output/compute/kernels/A_117.npy + - path: output/reconst/kernels/A_117.npy md5sum: c93cc75f20ad3cd089ba4a63179fe67b - - path: output/compute/kernels/A_118.npy + - path: output/reconst/kernels/A_118.npy md5sum: 4c1f5ad2c1e7ae137e0172fb130107b0 - - path: output/compute/kernels/A_119.npy + - path: output/reconst/kernels/A_119.npy md5sum: 54e168db1229f90809eef6f99db87313 - - path: output/compute/kernels/A_120.npy + - path: output/reconst/kernels/A_120.npy md5sum: 44aea9be1bebcc33ca4fc117fda3c381 - - path: output/compute/kernels/A_121.npy + - path: output/reconst/kernels/A_121.npy md5sum: bf30fc934d14d27298bc5fca0c628797 - - path: output/compute/kernels/A_122.npy + - path: output/reconst/kernels/A_122.npy md5sum: 6f69fda57c4e7630f554e55ecc18e9de - - path: output/compute/kernels/A_123.npy + - path: output/reconst/kernels/A_123.npy md5sum: e61656ac07e14b47b51cfd81076a36ad - - path: output/compute/kernels/A_124.npy + - path: output/reconst/kernels/A_124.npy md5sum: a20061f96654459c07919e96369f6bbd - - path: output/compute/kernels/A_125.npy + - path: output/reconst/kernels/A_125.npy md5sum: 
5ece1efe1e275f67cabc7b36eba4cbb8 - - path: output/compute/kernels/A_126.npy + - path: output/reconst/kernels/A_126.npy md5sum: c42eefa83a7342fec1bedce2bd57cc3d - - path: output/compute/kernels/A_127.npy + - path: output/reconst/kernels/A_127.npy md5sum: 5dc4f61d3cdddda0dbd94333a3dd7c88 - - path: output/compute/kernels/A_128.npy + - path: output/reconst/kernels/A_128.npy md5sum: 1facea6c3dcd3c2c16242987d65916a2 - - path: output/compute/kernels/A_129.npy + - path: output/reconst/kernels/A_129.npy md5sum: 779b671302e80805766851b8788a561b - - path: output/compute/kernels/A_130.npy + - path: output/reconst/kernels/A_130.npy md5sum: 8c116cc4b794f11122ea805bb1ba638b - - path: output/compute/kernels/A_131.npy + - path: output/reconst/kernels/A_131.npy md5sum: 08537ed38e1cd6acc6109ea9b042751a - - path: output/compute/kernels/A_132.npy + - path: output/reconst/kernels/A_132.npy md5sum: d69051b321a5e88037092ee5c95d99a4 - - path: output/compute/kernels/A_133.npy + - path: output/reconst/kernels/A_133.npy md5sum: beed98dcb37f25f4b8ce36b028c5aa32 - - path: output/compute/kernels/A_134.npy + - path: output/reconst/kernels/A_134.npy md5sum: d04ce67db73a8c879e7353c2b4c826ae - - path: output/compute/kernels/A_135.npy + - path: output/reconst/kernels/A_135.npy md5sum: dd502124b6581bd38b02acca47adbf6f - - path: output/compute/kernels/A_136.npy + - path: output/reconst/kernels/A_136.npy md5sum: ad932cbfe06c66a9ac3c3a1b02c75e47 - - path: output/compute/kernels/A_137.npy + - path: output/reconst/kernels/A_137.npy md5sum: 442480b9820c11cdd36ce119f9ebdbf6 - - path: output/compute/kernels/A_138.npy + - path: output/reconst/kernels/A_138.npy md5sum: e9b8ab1a2fd6f93b5584d8b23b752ef2 - - path: output/compute/kernels/A_139.npy + - path: output/reconst/kernels/A_139.npy md5sum: f53cbf9709a8d58de37ce719820b86dc - - path: output/compute/kernels/A_140.npy + - path: output/reconst/kernels/A_140.npy md5sum: 5f282c2887dbc956c72a80b65723f4ad - - path: output/compute/kernels/A_141.npy + - path: output/reconst/kernels/A_141.npy md5sum: 66da08fff4ea0637d0f44310f3edeb2f - - path: output/compute/kernels/A_142.npy + - path: output/reconst/kernels/A_142.npy md5sum: 565543843cdc7d2d48b89d2af794f37b - - path: output/compute/kernels/A_143.npy + - path: output/reconst/kernels/A_143.npy md5sum: 49b31341747a3fc81b9c7bfe65654ff7 - - path: output/compute/kernels/A_144.npy + - path: output/reconst/kernels/A_144.npy md5sum: 6a2a6393d7c6ccdf27cf4885c5cb34dd - - path: output/compute/kernels/A_145.npy + - path: output/reconst/kernels/A_145.npy md5sum: a26e3876df3c02b399f2d21bbd3ebf3a - - path: output/compute/versions.yml + - path: output/reconst/versions.yml - name: reconst noddi test_reconst_noddi_save_kernels_no_mask command: nextflow run ./tests/modules/nf-scil/reconst/noddi -entry test_reconst_noddi_save_kernels_no_mask -c ./tests/config/nextflow.config @@ -302,294 +302,294 @@ - reconst/noddi - reconst files: - - path: output/compute/kernels/A_001.npy + - path: output/reconst/kernels/A_001.npy md5sum: 7685778d192b6e900af06af03de1c40a - - path: output/compute/kernels/A_002.npy + - path: output/reconst/kernels/A_002.npy md5sum: dcc12f440bef6378ff60c0819dee2908 - - path: output/compute/kernels/A_003.npy + - path: output/reconst/kernels/A_003.npy md5sum: ed1167026a33c054d763281e1557a4e3 - - path: output/compute/kernels/A_004.npy + - path: output/reconst/kernels/A_004.npy md5sum: 54c0d829a6aa1e4e64494557cc5eb9e6 - - path: output/compute/kernels/A_005.npy + - path: output/reconst/kernels/A_005.npy md5sum: 3a8b3795e38d8b3bd0e4bcb7e68fd1b1 - - path: 
output/compute/kernels/A_006.npy + - path: output/reconst/kernels/A_006.npy md5sum: a4d016455774b4fa0e2fed7d332b20da - - path: output/compute/kernels/A_007.npy + - path: output/reconst/kernels/A_007.npy md5sum: 4c0e816675bdad94211a915f68378af1 - - path: output/compute/kernels/A_008.npy + - path: output/reconst/kernels/A_008.npy md5sum: 7cf5b62efc28be3e853ef51851a49790 - - path: output/compute/kernels/A_009.npy + - path: output/reconst/kernels/A_009.npy md5sum: bd46f9cfebc7c819a890c367ab6cb1da - - path: output/compute/kernels/A_010.npy + - path: output/reconst/kernels/A_010.npy md5sum: 609c7b5227168255d8cb7f1bee7333c2 - - path: output/compute/kernels/A_011.npy + - path: output/reconst/kernels/A_011.npy md5sum: 63bb74c9f92484bf928468994efb8f78 - - path: output/compute/kernels/A_012.npy + - path: output/reconst/kernels/A_012.npy md5sum: 9a5a92dabd50ab72bf03442a61656499 - - path: output/compute/kernels/A_013.npy + - path: output/reconst/kernels/A_013.npy md5sum: 0039dedb0f99de263117075648c1ba0c - - path: output/compute/kernels/A_014.npy + - path: output/reconst/kernels/A_014.npy md5sum: 8ef7c0316b06ab56b1bb239124bce91b - - path: output/compute/kernels/A_015.npy + - path: output/reconst/kernels/A_015.npy md5sum: 3b1f91a5ba7c52a1d9c7d4c5d7c170e1 - - path: output/compute/kernels/A_016.npy + - path: output/reconst/kernels/A_016.npy md5sum: 5d1b88ffe98c426eeb761b0c2ea17e52 - - path: output/compute/kernels/A_017.npy + - path: output/reconst/kernels/A_017.npy md5sum: abd1b83fa7d8ad6d5f248c11f9056ba3 - - path: output/compute/kernels/A_018.npy + - path: output/reconst/kernels/A_018.npy md5sum: d251399b3c9cd9204a4870504da4d281 - - path: output/compute/kernels/A_019.npy + - path: output/reconst/kernels/A_019.npy md5sum: 849ab4311ec7b01b684fc2b40c1b2778 - - path: output/compute/kernels/A_020.npy + - path: output/reconst/kernels/A_020.npy md5sum: 3f4c04129b1e7985bf0771ddfba8c015 - - path: output/compute/kernels/A_021.npy + - path: output/reconst/kernels/A_021.npy md5sum: 1994666f9141d28b907b8718e7e6f777 - - path: output/compute/kernels/A_022.npy + - path: output/reconst/kernels/A_022.npy md5sum: 4aaa5e9a86fb8fb01da26e41e2b06b1c - - path: output/compute/kernels/A_023.npy + - path: output/reconst/kernels/A_023.npy md5sum: 17e198034e513baeac9ff73ea15ee25a - - path: output/compute/kernels/A_024.npy + - path: output/reconst/kernels/A_024.npy md5sum: 3490a078a05053501c4aeac7f0b4f552 - - path: output/compute/kernels/A_025.npy + - path: output/reconst/kernels/A_025.npy md5sum: 4b5e35e1fb093421c673fba6d6c9b6df - - path: output/compute/kernels/A_026.npy + - path: output/reconst/kernels/A_026.npy md5sum: 9f50a6cf0ae4fece488696e057a275d6 - - path: output/compute/kernels/A_027.npy + - path: output/reconst/kernels/A_027.npy md5sum: fa1be584c09c3618734c96d702df53a8 - - path: output/compute/kernels/A_028.npy + - path: output/reconst/kernels/A_028.npy md5sum: 48dd2b0ed0cd5eb36602b5318e8e7a3a - - path: output/compute/kernels/A_029.npy + - path: output/reconst/kernels/A_029.npy md5sum: b3eb55ae0f277a19dc8f1d84a8dd895a - - path: output/compute/kernels/A_030.npy + - path: output/reconst/kernels/A_030.npy md5sum: d57c41283368a9b66b487f9db2d05fc2 - - path: output/compute/kernels/A_031.npy + - path: output/reconst/kernels/A_031.npy md5sum: 3cf5adb3ad92b1d67993722d9eab380b - - path: output/compute/kernels/A_032.npy + - path: output/reconst/kernels/A_032.npy md5sum: 779aa382425bedf4b966ed43a2609801 - - path: output/compute/kernels/A_033.npy + - path: output/reconst/kernels/A_033.npy md5sum: 4d4248fb123b63af721b3e097ff9903f - - 
path: output/compute/kernels/A_034.npy + - path: output/reconst/kernels/A_034.npy md5sum: a0f02c3980cd5a125f262d7e68a9a23c - - path: output/compute/kernels/A_035.npy + - path: output/reconst/kernels/A_035.npy md5sum: 8d919c8600bbb6f586b6c334fec84f9f - - path: output/compute/kernels/A_036.npy + - path: output/reconst/kernels/A_036.npy md5sum: d6edbee74e08a8c20b015ed4ae18b2ab - - path: output/compute/kernels/A_037.npy + - path: output/reconst/kernels/A_037.npy md5sum: f6f6d482a7ca42369f4c62e6f8eef552 - - path: output/compute/kernels/A_038.npy + - path: output/reconst/kernels/A_038.npy md5sum: 9dff1ac269b381a75f5c25bf051bf1c5 - - path: output/compute/kernels/A_039.npy + - path: output/reconst/kernels/A_039.npy md5sum: 8892bea5472f1b40c5868a536294f134 - - path: output/compute/kernels/A_040.npy + - path: output/reconst/kernels/A_040.npy md5sum: 99adce8e31e9288ea1550b8029f2bae0 - - path: output/compute/kernels/A_041.npy + - path: output/reconst/kernels/A_041.npy md5sum: 1b3fecb234dfc9e4554ac7df11017595 - - path: output/compute/kernels/A_042.npy + - path: output/reconst/kernels/A_042.npy md5sum: 1bc1a2afe6f03b031d7667566a084cb1 - - path: output/compute/kernels/A_043.npy + - path: output/reconst/kernels/A_043.npy md5sum: 3743e4a06a8c52b21e46d20bbb68c89c - - path: output/compute/kernels/A_044.npy + - path: output/reconst/kernels/A_044.npy md5sum: 022bc4586ed94d71e03096de6fdf2443 - - path: output/compute/kernels/A_045.npy + - path: output/reconst/kernels/A_045.npy md5sum: cd15743a7139f981fa65905ebb4c669f - - path: output/compute/kernels/A_046.npy + - path: output/reconst/kernels/A_046.npy md5sum: 89d46a8684dcbed0b84892f536745aeb - - path: output/compute/kernels/A_047.npy + - path: output/reconst/kernels/A_047.npy md5sum: 607d4cf2568437a32e130640033b2eb6 - - path: output/compute/kernels/A_048.npy + - path: output/reconst/kernels/A_048.npy md5sum: 080089c3ee74f6a670ba2f4629739523 - - path: output/compute/kernels/A_049.npy + - path: output/reconst/kernels/A_049.npy md5sum: ab43ede1e8e72e93985dbafe52fcae41 - - path: output/compute/kernels/A_050.npy + - path: output/reconst/kernels/A_050.npy md5sum: 89e79074000af7660861fe8596abf2dc - - path: output/compute/kernels/A_051.npy + - path: output/reconst/kernels/A_051.npy md5sum: dd8a8961ff769c358961b589c9fd605c - - path: output/compute/kernels/A_052.npy + - path: output/reconst/kernels/A_052.npy md5sum: c5f520b60797e0dc88aefaa705442a82 - - path: output/compute/kernels/A_053.npy + - path: output/reconst/kernels/A_053.npy md5sum: 7355cc61e1edf1576f8d695285b56d88 - - path: output/compute/kernels/A_054.npy + - path: output/reconst/kernels/A_054.npy md5sum: dc5d18d1f95b9b1251c6f1007231dd41 - - path: output/compute/kernels/A_055.npy + - path: output/reconst/kernels/A_055.npy md5sum: 51815fd6d8717d7bf729bbd8f0215bbf - - path: output/compute/kernels/A_056.npy + - path: output/reconst/kernels/A_056.npy md5sum: ede3f55419c4e8a41de56ea2fa357c2f - - path: output/compute/kernels/A_057.npy + - path: output/reconst/kernels/A_057.npy md5sum: 0be644cc5e4e962f38d7a60d6894cf08 - - path: output/compute/kernels/A_058.npy + - path: output/reconst/kernels/A_058.npy md5sum: a60bfda5b16ceb96f6e3a4a26ff8913f - - path: output/compute/kernels/A_059.npy + - path: output/reconst/kernels/A_059.npy md5sum: 71b34384955060e2cb2cb4cf3329842b - - path: output/compute/kernels/A_060.npy + - path: output/reconst/kernels/A_060.npy md5sum: 90a2a4f915947c7d2ee99969ba394f16 - - path: output/compute/kernels/A_061.npy + - path: output/reconst/kernels/A_061.npy md5sum: 29127b6428928197cd999fba5cc15962 - 
- path: output/compute/kernels/A_062.npy + - path: output/reconst/kernels/A_062.npy md5sum: c4cb3322d1f57543f3a4c24bdd6542d0 - - path: output/compute/kernels/A_063.npy + - path: output/reconst/kernels/A_063.npy md5sum: f2a434d9087486822d9467aa9a2949c6 - - path: output/compute/kernels/A_064.npy + - path: output/reconst/kernels/A_064.npy md5sum: 14629aaf9c0381e548ffa2859f02c7f2 - - path: output/compute/kernels/A_065.npy + - path: output/reconst/kernels/A_065.npy md5sum: 82964736555dd0e33d107b8d1ba04b99 - - path: output/compute/kernels/A_066.npy + - path: output/reconst/kernels/A_066.npy md5sum: aa282cb662a2581296f8c6432bec5def - - path: output/compute/kernels/A_067.npy + - path: output/reconst/kernels/A_067.npy md5sum: a8273af0f3f0f78ec36a9256b8e8838b - - path: output/compute/kernels/A_068.npy + - path: output/reconst/kernels/A_068.npy md5sum: 0243467ee9f75aafda13e586d3292236 - - path: output/compute/kernels/A_069.npy + - path: output/reconst/kernels/A_069.npy md5sum: 70b6e1cf69afa7861cdc71209bff19e3 - - path: output/compute/kernels/A_070.npy + - path: output/reconst/kernels/A_070.npy md5sum: ee159bf58499762488db9a3e80038e09 - - path: output/compute/kernels/A_071.npy + - path: output/reconst/kernels/A_071.npy md5sum: c255bfec1bebe547bce036b3d276b158 - - path: output/compute/kernels/A_072.npy + - path: output/reconst/kernels/A_072.npy md5sum: 5a7785fdd6906e33bbad4a4610e39dc0 - - path: output/compute/kernels/A_073.npy + - path: output/reconst/kernels/A_073.npy md5sum: 83d41cc79f305d9a097a91f18250319d - - path: output/compute/kernels/A_074.npy + - path: output/reconst/kernels/A_074.npy md5sum: 4b30b4210b3e5c4e026a58892308a4e3 - - path: output/compute/kernels/A_075.npy + - path: output/reconst/kernels/A_075.npy md5sum: f6f83ea9451b3f96a7bbe015cc9c8c21 - - path: output/compute/kernels/A_076.npy + - path: output/reconst/kernels/A_076.npy md5sum: 40e72cd83f75039825e15d9ce4ed05e4 - - path: output/compute/kernels/A_077.npy + - path: output/reconst/kernels/A_077.npy md5sum: c961a829523e020ef5e0ebf1a733c19b - - path: output/compute/kernels/A_078.npy + - path: output/reconst/kernels/A_078.npy md5sum: c24066930825c7756342742df6331c82 - - path: output/compute/kernels/A_079.npy + - path: output/reconst/kernels/A_079.npy md5sum: 07ddc1e8a148442e83e6aca66e13acd5 - - path: output/compute/kernels/A_080.npy + - path: output/reconst/kernels/A_080.npy md5sum: 709ac9b7c8301153f0317b96cda75552 - - path: output/compute/kernels/A_081.npy + - path: output/reconst/kernels/A_081.npy md5sum: 8bede120aa60737b83b61d8d0aac582a - - path: output/compute/kernels/A_082.npy + - path: output/reconst/kernels/A_082.npy md5sum: 3945f973add1a6db95fe58e329ec91e7 - - path: output/compute/kernels/A_083.npy + - path: output/reconst/kernels/A_083.npy md5sum: 4c201bac152c7e9130830d8915886683 - - path: output/compute/kernels/A_084.npy + - path: output/reconst/kernels/A_084.npy md5sum: 233696d75a8e674a0820b0f4123b621b - - path: output/compute/kernels/A_085.npy + - path: output/reconst/kernels/A_085.npy md5sum: 783ed22541c2470a64236cfb9745a787 - - path: output/compute/kernels/A_086.npy + - path: output/reconst/kernels/A_086.npy md5sum: c4df82fe0257e943bff9bbfe97c8bd01 - - path: output/compute/kernels/A_087.npy + - path: output/reconst/kernels/A_087.npy md5sum: 69ffd05ba012ae164e882e2f0720492b - - path: output/compute/kernels/A_088.npy + - path: output/reconst/kernels/A_088.npy md5sum: 0b41b02520099e6945a34fdbcc5ee7a8 - - path: output/compute/kernels/A_089.npy + - path: output/reconst/kernels/A_089.npy md5sum: 0eea673e2d6ecfa072bc8e72c878cdcd 
- - path: output/compute/kernels/A_090.npy + - path: output/reconst/kernels/A_090.npy md5sum: be79cf920ab80d7f6ebd04ec878eb6f6 - - path: output/compute/kernels/A_091.npy + - path: output/reconst/kernels/A_091.npy md5sum: a33b58f20a68bdc59f262c714c0f5616 - - path: output/compute/kernels/A_092.npy + - path: output/reconst/kernels/A_092.npy md5sum: d0f3e9e8b8ce77c4c91a95f6bf91dadf - - path: output/compute/kernels/A_093.npy + - path: output/reconst/kernels/A_093.npy md5sum: 8af839b27f6b1b3e0c712c73b9ca95d5 - - path: output/compute/kernels/A_094.npy + - path: output/reconst/kernels/A_094.npy md5sum: 33045a5adb24c5a4c04e28f1495e4af2 - - path: output/compute/kernels/A_095.npy + - path: output/reconst/kernels/A_095.npy md5sum: 747034be17ce34ac1837a7ef4a785675 - - path: output/compute/kernels/A_096.npy + - path: output/reconst/kernels/A_096.npy md5sum: 93d719b74fb1fb304784aece5b6c5ab4 - - path: output/compute/kernels/A_097.npy + - path: output/reconst/kernels/A_097.npy md5sum: 013fa7ae6870925e17fb73ee78809502 - - path: output/compute/kernels/A_098.npy + - path: output/reconst/kernels/A_098.npy md5sum: 096edd9c291d4f9d85152b50e86a3112 - - path: output/compute/kernels/A_099.npy + - path: output/reconst/kernels/A_099.npy md5sum: 08b65ea77d226c1f7d5876cee415ce4a - - path: output/compute/kernels/A_100.npy + - path: output/reconst/kernels/A_100.npy md5sum: dd38ec6122813be0450bc9a89401b040 - - path: output/compute/kernels/A_101.npy + - path: output/reconst/kernels/A_101.npy md5sum: 013c4e58cfe8deda708575a2bf51ec43 - - path: output/compute/kernels/A_102.npy + - path: output/reconst/kernels/A_102.npy md5sum: 92f56a865fda5830512f30b158472357 - - path: output/compute/kernels/A_103.npy + - path: output/reconst/kernels/A_103.npy md5sum: 6cc2c4cb5259aa5a08eb87ae78eabd2f - - path: output/compute/kernels/A_104.npy + - path: output/reconst/kernels/A_104.npy md5sum: c286597b7b9101f9fbde1a5a89c9aa57 - - path: output/compute/kernels/A_105.npy + - path: output/reconst/kernels/A_105.npy md5sum: a193d8436a95481f3dcd9a2fb22bf8c1 - - path: output/compute/kernels/A_106.npy + - path: output/reconst/kernels/A_106.npy md5sum: 8e7e770817081413839881b31af40684 - - path: output/compute/kernels/A_107.npy + - path: output/reconst/kernels/A_107.npy md5sum: 8d5987397d5f7fc4e13c4bd94a9ed0ff - - path: output/compute/kernels/A_108.npy + - path: output/reconst/kernels/A_108.npy md5sum: 83ee0ee52c845957cf5ce16cb343fb57 - - path: output/compute/kernels/A_109.npy + - path: output/reconst/kernels/A_109.npy md5sum: 6c25ca897da95f16e92384024a9ff6f8 - - path: output/compute/kernels/A_110.npy + - path: output/reconst/kernels/A_110.npy md5sum: dd40c52181be77afeaa77bdc611fb6d4 - - path: output/compute/kernels/A_111.npy + - path: output/reconst/kernels/A_111.npy md5sum: 8ce666c5c5c954c051a0344af7405571 - - path: output/compute/kernels/A_112.npy + - path: output/reconst/kernels/A_112.npy md5sum: 0f05bf2d7694ca3572ba495fa46ee81f - - path: output/compute/kernels/A_113.npy + - path: output/reconst/kernels/A_113.npy md5sum: ff3fae915eceb71c3d550245f4467752 - - path: output/compute/kernels/A_114.npy + - path: output/reconst/kernels/A_114.npy md5sum: b82049618788b0450e2b43c5f6d8119d - - path: output/compute/kernels/A_115.npy + - path: output/reconst/kernels/A_115.npy md5sum: ac9a6075479974c1fecd9352550456b6 - - path: output/compute/kernels/A_116.npy + - path: output/reconst/kernels/A_116.npy md5sum: 09482147106b80d7bd6fda72bf8baf86 - - path: output/compute/kernels/A_117.npy + - path: output/reconst/kernels/A_117.npy md5sum: 
c93cc75f20ad3cd089ba4a63179fe67b - - path: output/compute/kernels/A_118.npy + - path: output/reconst/kernels/A_118.npy md5sum: 4c1f5ad2c1e7ae137e0172fb130107b0 - - path: output/compute/kernels/A_119.npy + - path: output/reconst/kernels/A_119.npy md5sum: 54e168db1229f90809eef6f99db87313 - - path: output/compute/kernels/A_120.npy + - path: output/reconst/kernels/A_120.npy md5sum: 44aea9be1bebcc33ca4fc117fda3c381 - - path: output/compute/kernels/A_121.npy + - path: output/reconst/kernels/A_121.npy md5sum: bf30fc934d14d27298bc5fca0c628797 - - path: output/compute/kernels/A_122.npy + - path: output/reconst/kernels/A_122.npy md5sum: 6f69fda57c4e7630f554e55ecc18e9de - - path: output/compute/kernels/A_123.npy + - path: output/reconst/kernels/A_123.npy md5sum: e61656ac07e14b47b51cfd81076a36ad - - path: output/compute/kernels/A_124.npy + - path: output/reconst/kernels/A_124.npy md5sum: a20061f96654459c07919e96369f6bbd - - path: output/compute/kernels/A_125.npy + - path: output/reconst/kernels/A_125.npy md5sum: 5ece1efe1e275f67cabc7b36eba4cbb8 - - path: output/compute/kernels/A_126.npy + - path: output/reconst/kernels/A_126.npy md5sum: c42eefa83a7342fec1bedce2bd57cc3d - - path: output/compute/kernels/A_127.npy + - path: output/reconst/kernels/A_127.npy md5sum: 5dc4f61d3cdddda0dbd94333a3dd7c88 - - path: output/compute/kernels/A_128.npy + - path: output/reconst/kernels/A_128.npy md5sum: 1facea6c3dcd3c2c16242987d65916a2 - - path: output/compute/kernels/A_129.npy + - path: output/reconst/kernels/A_129.npy md5sum: 779b671302e80805766851b8788a561b - - path: output/compute/kernels/A_130.npy + - path: output/reconst/kernels/A_130.npy md5sum: 8c116cc4b794f11122ea805bb1ba638b - - path: output/compute/kernels/A_131.npy + - path: output/reconst/kernels/A_131.npy md5sum: 08537ed38e1cd6acc6109ea9b042751a - - path: output/compute/kernels/A_132.npy + - path: output/reconst/kernels/A_132.npy md5sum: d69051b321a5e88037092ee5c95d99a4 - - path: output/compute/kernels/A_133.npy + - path: output/reconst/kernels/A_133.npy md5sum: beed98dcb37f25f4b8ce36b028c5aa32 - - path: output/compute/kernels/A_134.npy + - path: output/reconst/kernels/A_134.npy md5sum: d04ce67db73a8c879e7353c2b4c826ae - - path: output/compute/kernels/A_135.npy + - path: output/reconst/kernels/A_135.npy md5sum: dd502124b6581bd38b02acca47adbf6f - - path: output/compute/kernels/A_136.npy + - path: output/reconst/kernels/A_136.npy md5sum: ad932cbfe06c66a9ac3c3a1b02c75e47 - - path: output/compute/kernels/A_137.npy + - path: output/reconst/kernels/A_137.npy md5sum: 442480b9820c11cdd36ce119f9ebdbf6 - - path: output/compute/kernels/A_138.npy + - path: output/reconst/kernels/A_138.npy md5sum: e9b8ab1a2fd6f93b5584d8b23b752ef2 - - path: output/compute/kernels/A_139.npy + - path: output/reconst/kernels/A_139.npy md5sum: f53cbf9709a8d58de37ce719820b86dc - - path: output/compute/kernels/A_140.npy + - path: output/reconst/kernels/A_140.npy md5sum: 5f282c2887dbc956c72a80b65723f4ad - - path: output/compute/kernels/A_141.npy + - path: output/reconst/kernels/A_141.npy md5sum: 66da08fff4ea0637d0f44310f3edeb2f - - path: output/compute/kernels/A_142.npy + - path: output/reconst/kernels/A_142.npy md5sum: 565543843cdc7d2d48b89d2af794f37b - - path: output/compute/kernels/A_143.npy + - path: output/reconst/kernels/A_143.npy md5sum: 49b31341747a3fc81b9c7bfe65654ff7 - - path: output/compute/kernels/A_144.npy + - path: output/reconst/kernels/A_144.npy md5sum: 6a2a6393d7c6ccdf27cf4885c5cb34dd - - path: output/compute/kernels/A_145.npy + - path: output/reconst/kernels/A_145.npy 
md5sum: a26e3876df3c02b399f2d21bbd3ebf3a - - path: output/compute/versions.yml + - path: output/reconst/versions.yml From 5c36be797e77eac9602e69c3cdf1c3c1c708d05e Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 01:57:21 +0000 Subject: [PATCH 30/41] fix diffusivity priors --- modules/nf-scil/preproc/n4/main.nf | 12 +++--- .../nf-scil/reconst/diffusivitypriors/main.nf | 43 +++++++++++-------- tests/config/test_data.config | 11 +++-- .../nf-scil/reconst/diffusivitypriors/main.nf | 12 +++--- .../reconst/diffusivitypriors/nextflow.config | 1 - .../reconst/diffusivitypriors/test.yml | 24 +++++------ 6 files changed, 56 insertions(+), 47 deletions(-) diff --git a/modules/nf-scil/preproc/n4/main.nf b/modules/nf-scil/preproc/n4/main.nf index e568d58cd..a5c4687e5 100644 --- a/modules/nf-scil/preproc/n4/main.nf +++ b/modules/nf-scil/preproc/n4/main.nf @@ -40,9 +40,9 @@ process PREPROC_N4 { ${prefix}__dwi_n4.nii.gz --mask $b0_mask -f cat <<-END_VERSIONS > versions.yml - "${task.process}": - scilpy: 1.6.0 - N4BiasFieldCorrection: \$(N4BiasFieldCorrection --version 2>&1 | sed -n 's/ANTs Version: v\\([0-9.]\\+\\)/\\1/p') + "${task.process}": + scilpy: 1.6.0 + N4BiasFieldCorrection: \$(N4BiasFieldCorrection --version 2>&1 | sed -n 's/ANTs Version: v\\([0-9.]\\+\\)/\\1/p') END_VERSIONS """ @@ -56,9 +56,9 @@ process PREPROC_N4 { touch ${prefix}_dwi_n4.nii.gz cat <<-END_VERSIONS > versions.yml - "${task.process}": - scilpy: 1.6.0 - N4BiasFieldCorrection: \$(N4BiasFieldCorrection --version 2>&1 | sed -n 's/ANTs Version: v\\([0-9.]\\+\\)/\\1/p') + "${task.process}": + scilpy: 1.6.0 + N4BiasFieldCorrection: \$(N4BiasFieldCorrection --version 2>&1 | sed -n 's/ANTs Version: v\\([0-9.]\\+\\)/\\1/p') END_VERSIONS """ } diff --git a/modules/nf-scil/reconst/diffusivitypriors/main.nf b/modules/nf-scil/reconst/diffusivitypriors/main.nf index a6d440c42..d9e3dcae6 100644 --- a/modules/nf-scil/reconst/diffusivitypriors/main.nf +++ b/modules/nf-scil/reconst/diffusivitypriors/main.nf @@ -1,4 +1,20 @@ +import nextflow.util.BlankSeparatedList + +def compute_noddi_priors ( fa, ad, md, fa_min, fa_max, md_min, roi_radius, prefix, output_directory ) { + """ + mkdir -p $output_directory + + scil_compute_NODDI_priors.py $fa $ad $md $fa_min $fa_max $md_min $roi_radius \ + --out_txt_1fiber $output_directory/${prefix}__para_diff.txt \ + --out_txt_ventricles $output_directory/${prefix}__iso_diff.txt + """ +} + + +def is_directory ( pathlike ) { + return !(pathlike instanceof BlankSeparatedList) && pathlike.isDirectory() +} process RECONST_DIFFUSIVITYPRIORS { tag "$meta.id" @@ -30,26 +46,15 @@ process RECONST_DIFFUSIVITYPRIORS { def md_min = task.ext.md_min ? "--md_min " + task.ext.md_min : "" def roi_radius = task.ext.roi_radius ? "--roi_radius " + task.ext.roi_radius : "" + def priors_directory = priors.isEmpty() ? "priors" : !is_directory(priors) ? 
"priors" : priors """ - if [ -d "$priors" ] - then - cat $priors/*__para_diff.txt > all_para_diff.txt - awk '{ total += \$1; count++ } END { print total/count }' all_para_diff.txt > mean_para_diff.txt - cat $priors/*__iso_diff.txt > all_iso_diff.txt - awk '{ total += \$1; count++ } END { print total/count }' all_iso_diff.txt > mean_iso_diff.txt - - else - - mkdir priors - scil_compute_NODDI_priors.py $fa $ad $md $fa_min $fa_max $md_min $roi_radius\ - --out_txt_1fiber priors/${prefix}__para_diff.txt\ - --out_txt_ventricles priors/${prefix}__iso_diff.txt - - cat priors/*__para_diff.txt > all_para_diff.txt - awk '{ total += \$1; count++ } END { print total/count }' all_para_diff.txt > mean_para_diff.txt - cat priors/*__iso_diff.txt > all_iso_diff.txt - awk '{ total += \$1; count++ } END { print total/count }' all_iso_diff.txt > mean_iso_diff.txt - fi + ${ priors.isEmpty() ? compute_noddi_priors( fa, ad, md, fa_min, fa_max, md_min, roi_radius, prefix, priors_directory ) : ""} + + ${ !priors.isEmpty() && !is_directory(priors) ? "mkdir -p priors && ln $priors priors" : "" } + cat $priors_directory/*__para_diff.txt > all_para_diff.txt + awk '{ total += \$1; count++ } END { print total/count }' all_para_diff.txt > mean_para_diff.txt + cat $priors_directory/*__iso_diff.txt > all_iso_diff.txt + awk '{ total += \$1; count++ } END { print total/count }' all_iso_diff.txt > mean_iso_diff.txt cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/tests/config/test_data.config b/tests/config/test_data.config index f8693824c..4a06150f7 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -46,8 +46,13 @@ params { bval = "${params.test_data_base}/heavy/dwi/dwi.bval" bvec = "${params.test_data_base}/heavy/dwi/dwi.bvec" } - "diffusivitypriors"{ - priors = "${params.test_data_base}/light/priors" + "diffusivitypriors"{ + priors = [ + "${params.test_data_base}/light/priors/s1__iso_diff.txt", + "${params.test_data_base}/light/priors/s1__para_diff.txt", + "${params.test_data_base}/light/priors/s2__iso_diff.txt", + "${params.test_data_base}/light/priors/s2__para_diff.txt" + ] } "frf" { dwi = "${params.test_data_base}/heavy/dwi/dwi.nii.gz" @@ -87,7 +92,7 @@ params { "extractb0" { dwi = "${params.test_data_base}/heavy/dwi/dwi.nii.gz" bval = "${params.test_data_base}/heavy/dwi/dwi.bval" - bvec = "${params.test_data_base}/heavy/dwi/dwi.bvec" + bvec = "${params.test_data_base}/heavy/dwi/dwi.bvec" } } } diff --git a/tests/modules/nf-scil/reconst/diffusivitypriors/main.nf b/tests/modules/nf-scil/reconst/diffusivitypriors/main.nf index 3608a9620..aab4fdd92 100644 --- a/tests/modules/nf-scil/reconst/diffusivitypriors/main.nf +++ b/tests/modules/nf-scil/reconst/diffusivitypriors/main.nf @@ -2,12 +2,12 @@ nextflow.enable.dsl = 2 -include { RECONST_DIFFUSIVITYPRIORS as MEAN_PRIORS} from '../../../../../modules/nf-scil/reconst/diffusivitypriors/main.nf' -include { RECONST_DIFFUSIVITYPRIORS as COMPUTE_PRIORS} from '../../../../../modules/nf-scil/reconst/diffusivitypriors/main.nf' +include { RECONST_DIFFUSIVITYPRIORS as RECONST_MEAN_PRIORS} from '../../../../../modules/nf-scil/reconst/diffusivitypriors/main.nf' +include { RECONST_DIFFUSIVITYPRIORS as RECONST_COMPUTE_PRIORS} from '../../../../../modules/nf-scil/reconst/diffusivitypriors/main.nf' include { LOAD_TEST_DATA } from '../../../../../subworkflows/nf-scil/load_test_data/main.nf' workflow test_reconst_diffusivitypriors_compute_priors { - + input_fetch = Channel.from( [ "commit_amico.zip" ] ) LOAD_TEST_DATA ( input_fetch, 
"test.load-test-data" ) @@ -21,7 +21,7 @@ workflow test_reconst_diffusivitypriors_compute_priors { [] ]} - COMPUTE_PRIORS ( input_priors ) + RECONST_COMPUTE_PRIORS ( input_priors ) } workflow test_reconst_diffusivitypriors_mean_priors { @@ -29,10 +29,10 @@ workflow test_reconst_diffusivitypriors_mean_priors { input = [ [ id:'test', single_end:false ], // meta map [],[],[], - file(params.test_data['reconst']['diffusivitypriors']['priors'], checkIfExists: true) + params.test_data['reconst']['diffusivitypriors']['priors'].collect{ file(it, checkIfExists: true) } ] - MEAN_PRIORS ( input ) + RECONST_MEAN_PRIORS ( input ) } diff --git a/tests/modules/nf-scil/reconst/diffusivitypriors/nextflow.config b/tests/modules/nf-scil/reconst/diffusivitypriors/nextflow.config index fa8c4a0a7..6de8b6e47 100644 --- a/tests/modules/nf-scil/reconst/diffusivitypriors/nextflow.config +++ b/tests/modules/nf-scil/reconst/diffusivitypriors/nextflow.config @@ -2,7 +2,6 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: "RECONST_DIFFUSIVITYPRIORS" { - publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } ext.fa_min = 0.7 ext.fa_max = 0.1 ext.md_min = 0.003 diff --git a/tests/modules/nf-scil/reconst/diffusivitypriors/test.yml b/tests/modules/nf-scil/reconst/diffusivitypriors/test.yml index 326dc17ac..b7a7f768b 100644 --- a/tests/modules/nf-scil/reconst/diffusivitypriors/test.yml +++ b/tests/modules/nf-scil/reconst/diffusivitypriors/test.yml @@ -4,15 +4,15 @@ - reconst - reconst/diffusivitypriors files: - - path: output/compute/mean_iso_diff.txt + - path: output/reconst/mean_iso_diff.txt md5sum: 3f13db606b697f920d21efd3c9cf8838 - - path: output/compute/mean_para_diff.txt + - path: output/reconst/mean_para_diff.txt md5sum: b5ab394820e011a04e3787be3772ad6e - - path: output/compute/priors/test__iso_diff.txt + - path: output/reconst/priors/test__iso_diff.txt md5sum: 3f13db606b697f920d21efd3c9cf8838 - - path: output/compute/priors/test__para_diff.txt + - path: output/reconst/priors/test__para_diff.txt md5sum: b5ab394820e011a04e3787be3772ad6e - - path: output/compute/versions.yml + - path: output/reconst/versions.yml - name: reconst diffusivitypriors test_reconst_diffusivitypriors_mean_priors command: nextflow run ./tests/modules/nf-scil/reconst/diffusivitypriors -entry test_reconst_diffusivitypriors_mean_priors -c ./tests/config/nextflow.config @@ -20,16 +20,16 @@ - reconst - reconst/diffusivitypriors files: - - path: output/mean/mean_iso_diff.txt + - path: output/reconst/mean_iso_diff.txt md5sum: b048050981231e15bfafcda4536e2d05 - - path: output/mean/mean_para_diff.txt + - path: output/reconst/mean_para_diff.txt md5sum: 4ef4f76ce3a673249bcbaa0eba9ce832 - - path: output/mean/priors/s1__iso_diff.txt + - path: output/reconst/priors/s1__iso_diff.txt md5sum: 3f13db606b697f920d21efd3c9cf8838 - - path: output/mean/priors/s1__para_diff.txt + - path: output/reconst/priors/s1__para_diff.txt md5sum: b5ab394820e011a04e3787be3772ad6e - - path: output/mean/priors/s2__iso_diff.txt + - path: output/reconst/priors/s2__iso_diff.txt md5sum: 5b003c147cc5f202d2e8c61acaa5eaea - - path: output/mean/priors/s2__para_diff.txt + - path: output/reconst/priors/s2__para_diff.txt md5sum: 202cb21086439607b99b81ead3e7f817 - - path: output/mean/versions.yml + - path: output/reconst/versions.yml From 416f25b5a262c72b0b7146da38a9a03306f2a21e Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 02:37:36 +0000 Subject: [PATCH 
31/41] fix N4 --- modules/nf-scil/preproc/n4/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/nf-scil/preproc/n4/main.nf b/modules/nf-scil/preproc/n4/main.nf index a5c4687e5..11578d570 100644 --- a/modules/nf-scil/preproc/n4/main.nf +++ b/modules/nf-scil/preproc/n4/main.nf @@ -27,7 +27,7 @@ process PREPROC_N4 { export OPENBLAS_NUM_THREADS=1 export ANTS_RANDOM_SEED=1234 - spacing=\$(mrinfo -spacing $b0 | tr " " "\n" | sort -n | tail -1) + spacing=\$(mrinfo -spacing $b0 | tr " " "\\n" | sort -n | tail -1) knot_spacing=\$(echo "\$spacing/$bspline_knot_per_voxel" | bc -l) N4BiasFieldCorrection -i $b0\ From cbe480ac18c514ff60683bbfa320dfeb4c2e7c3d Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 03:00:21 +0000 Subject: [PATCH 32/41] fix extractb0 --- tests/modules/nf-scil/utils/extractb0/main.nf | 32 +++++++++---------- .../nf-scil/utils/extractb0/nextflow.config | 12 +++---- .../modules/nf-scil/utils/extractb0/test.yml | 30 ++++++++--------- 3 files changed, 37 insertions(+), 37 deletions(-) diff --git a/tests/modules/nf-scil/utils/extractb0/main.nf b/tests/modules/nf-scil/utils/extractb0/main.nf index 3c0d5edba..d6599db29 100644 --- a/tests/modules/nf-scil/utils/extractb0/main.nf +++ b/tests/modules/nf-scil/utils/extractb0/main.nf @@ -3,14 +3,14 @@ nextflow.enable.dsl = 2 include { - UTILS_EXTRACTB0 as EXTRACTB0_MEAN; - UTILS_EXTRACTB0 as EXTRACTB0_ALL4D; - UTILS_EXTRACTB0 as EXTRACTB0_ALLSERIES; - UTILS_EXTRACTB0 as EXTRACTB0_CLUSTERMEAN; - UTILS_EXTRACTB0 as EXTRACTB0_CLUSTERFIRST} from '../../../../../modules/nf-scil/utils/extractb0/main.nf' + UTILS_EXTRACTB0 as UTILS_EXTRACTB0_MEAN; + UTILS_EXTRACTB0 as UTILS_EXTRACTB0_ALL4D; + UTILS_EXTRACTB0 as UTILS_EXTRACTB0_ALLSERIES; + UTILS_EXTRACTB0 as UTILS_EXTRACTB0_CLUSTERMEAN; + UTILS_EXTRACTB0 as UTILS_EXTRACTB0_CLUSTERFIRST} from '../../../../../modules/nf-scil/utils/extractb0/main.nf' workflow test_utils_extractb0_mean { - + input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['utils']['extractb0']['dwi'], checkIfExists: true), @@ -18,11 +18,11 @@ workflow test_utils_extractb0_mean { file(params.test_data['utils']['extractb0']['bvec'], checkIfExists: true) ] - EXTRACTB0_MEAN ( input ) + UTILS_EXTRACTB0_MEAN ( input ) } workflow test_utils_extractb0_all_4D { - + input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['utils']['extractb0']['dwi'], checkIfExists: true), @@ -30,11 +30,11 @@ workflow test_utils_extractb0_all_4D { file(params.test_data['utils']['extractb0']['bvec'], checkIfExists: true) ] - EXTRACTB0_ALL4D ( input ) + UTILS_EXTRACTB0_ALL4D ( input ) } workflow test_utils_extractb0_all_series { - + input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['utils']['extractb0']['dwi'], checkIfExists: true), @@ -42,11 +42,11 @@ workflow test_utils_extractb0_all_series { file(params.test_data['utils']['extractb0']['bvec'], checkIfExists: true) ] - EXTRACTB0_ALLSERIES ( input ) + UTILS_EXTRACTB0_ALLSERIES ( input ) } workflow test_utils_extractb0_cluster_mean { - + input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['utils']['extractb0']['dwi'], checkIfExists: true), @@ -54,11 +54,11 @@ workflow test_utils_extractb0_cluster_mean { file(params.test_data['utils']['extractb0']['bvec'], checkIfExists: true) ] - EXTRACTB0_CLUSTERMEAN ( input ) + UTILS_EXTRACTB0_CLUSTERMEAN ( input ) } workflow test_utils_extractb0_cluster_first { - EXTRACTB0_CLUSTERFIRST + input = [ [ id:'test', single_end:false ], // 
meta map file(params.test_data['utils']['extractb0']['dwi'], checkIfExists: true), @@ -66,5 +66,5 @@ workflow test_utils_extractb0_cluster_first { file(params.test_data['utils']['extractb0']['bvec'], checkIfExists: true) ] - EXTRACTB0_CLUSTERFIRST ( input ) -} \ No newline at end of file + UTILS_EXTRACTB0_CLUSTERFIRST ( input ) +} diff --git a/tests/modules/nf-scil/utils/extractb0/nextflow.config b/tests/modules/nf-scil/utils/extractb0/nextflow.config index e41bc99b6..a3b211bff 100644 --- a/tests/modules/nf-scil/utils/extractb0/nextflow.config +++ b/tests/modules/nf-scil/utils/extractb0/nextflow.config @@ -7,24 +7,24 @@ process { ext.output_single_volume = true } - withName: "EXTRACTB0_MEAN" { + withName: "UTILS_EXTRACTB0_MEAN" { ext.b0_extraction_strategy = "mean" } - withName: "EXTRACTB0_ALL4D" { + withName: "UTILS_EXTRACTB0_ALL4D" { ext.b0_extraction_strategy = "all" } - withName: "EXTRACTB0_ALLSERIES" { + withName: "UTILS_EXTRACTB0_ALLSERIES" { ext.b0_extraction_strategy = "all" ext.output_single_volume = false } - withName: "EXTRACTB0_CLUSTERMEAN" { + withName: "UTILS_EXTRACTB0_CLUSTERMEAN" { ext.b0_extraction_strategy = "cluster-mean" } - withName: "EXTRACTB0_CLUSTERFIRST" { + withName: "UTILS_EXTRACTB0_CLUSTERFIRST" { ext.b0_extraction_strategy = "cluster-first" } -} \ No newline at end of file +} diff --git a/tests/modules/nf-scil/utils/extractb0/test.yml b/tests/modules/nf-scil/utils/extractb0/test.yml index 16f90263d..d93a77379 100644 --- a/tests/modules/nf-scil/utils/extractb0/test.yml +++ b/tests/modules/nf-scil/utils/extractb0/test.yml @@ -1,49 +1,49 @@ - name: utils extractb0 test_utils_extractb0_mean command: nextflow run ./tests/modules/nf-scil/utils/extractb0 -entry test_utils_extractb0_mean -c ./tests/config/nextflow.config tags: - - utils/extractb0 - utils + - utils/extractb0 files: - - path: output/extractb0/test_b0.nii.gz + - path: output/utils/test_b0.nii.gz md5sum: 9870c6a4fea2c94453d061012c8507b1 - - path: output/extractb0/versions.yml + - path: output/utils/versions.yml - name: utils extractb0 test_utils_extractb0_all_4D command: nextflow run ./tests/modules/nf-scil/utils/extractb0 -entry test_utils_extractb0_all_4D -c ./tests/config/nextflow.config tags: - - utils/extractb0 - utils + - utils/extractb0 files: - - path: output/extractb0/test_b0.nii.gz + - path: output/utils/test_b0.nii.gz md5sum: 48ef1812ca653bcb3370e9a75ff3fd5e - - path: output/extractb0/versions.yml + - path: output/utils/versions.yml - name: utils extractb0 test_utils_extractb0_all_series command: nextflow run ./tests/modules/nf-scil/utils/extractb0 -entry test_utils_extractb0_all_series -c ./tests/config/nextflow.config tags: - - utils/extractb0 - utils + - utils/extractb0 files: - - path: output/extractb0/test_b0.nii.gz + - path: output/utils/test_b0.nii.gz md5sum: 48ef1812ca653bcb3370e9a75ff3fd5e - - path: output/extractb0/versions.yml + - path: output/utils/versions.yml - name: utils extractb0 test_utils_extractb0_cluster_mean command: nextflow run ./tests/modules/nf-scil/utils/extractb0 -entry test_utils_extractb0_cluster_mean -c ./tests/config/nextflow.config tags: - - utils/extractb0 - utils + - utils/extractb0 files: - - path: output/extractb0/test_b0.nii.gz + - path: output/utils/test_b0.nii.gz md5sum: 48ef1812ca653bcb3370e9a75ff3fd5e - - path: output/extractb0/versions.yml + - path: output/utils/versions.yml - name: utils extractb0 test_utils_extractb0_cluster_first command: nextflow run ./tests/modules/nf-scil/utils/extractb0 -entry test_utils_extractb0_cluster_first -c 
./tests/config/nextflow.config tags: - - utils/extractb0 - utils + - utils/extractb0 files: - - path: output/extractb0/test_b0.nii.gz + - path: output/utils/test_b0.nii.gz md5sum: 48ef1812ca653bcb3370e9a75ff3fd5e - - path: output/extractb0/versions.yml + - path: output/utils/versions.yml From ec7a854f3220d405c134bfb3ceff7340a5438d14 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 03:08:48 +0000 Subject: [PATCH 33/41] remove broken test cases for now --- tests/config/pytest_modules.yml | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 82d56e96a..e30e09140 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -26,9 +26,10 @@ preproc/normalize: - modules/nf-scil/preproc/normalize/** - tests/modules/nf-scil/preproc/normalize/** -tracking/localtracking: - - modules/nf-scil/tracking/localtracking/** - - tests/modules/nf-scil/tracking/localtracking/** +# Reinstate when test data becomes valid again +#tracking/localtracking: +# - modules/nf-scil/tracking/localtracking/** +# - tests/modules/nf-scil/tracking/localtracking/** reconst/diffusivitypriors: - modules/nf-scil/reconst/diffusivitypriors/** @@ -38,17 +39,19 @@ reconst/dtimetrics: - modules/nf-scil/reconst/dtimetrics/** - tests/modules/nf-scil/reconst/dtimetrics/** -reconst/fodf: - - modules/nf-scil/reconst/fodf/** - - tests/modules/nf-scil/reconst/fodf/** +# Reinstate when test data becomes valid again +#reconst/fodf: +# - modules/nf-scil/reconst/fodf/** +# - tests/modules/nf-scil/reconst/fodf/** reconst/frf: - modules/nf-scil/reconst/frf/** - tests/modules/nf-scil/reconst/frf/** -reconst/meanfrf: - - modules/nf-scil/reconst/meanfrf/** - - tests/modules/nf-scil/reconst/meanfrf/** +# Reinstate when test data becomes valid again +#reconst/meanfrf: +# - modules/nf-scil/reconst/meanfrf/** +# - tests/modules/nf-scil/reconst/meanfrf/** reconst/noddi: - modules/nf-scil/reconst/noddi/** @@ -66,9 +69,10 @@ subworkflows/load_test_data: - subworkflows/nf-scil/load_test_data/** - tests/subworkflows/nf-scil/load_test_data/** -tracking/pfttracking: - - modules/nf-scil/tracking/pfttracking/** - - tests/modules/nf-scil/tracking/pfttracking/** +# Reinstate when test data becomes valid again +#tracking/pfttracking: +# - modules/nf-scil/tracking/pfttracking/** +# - tests/modules/nf-scil/tracking/pfttracking/** utils/extractb0: - modules/nf-scil/utils/extractb0/** From 004def3abdc7eaaebffc064bf10895ad40f04037 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 03:18:04 +0000 Subject: [PATCH 34/41] add yaml-schema link for linting --- modules/yaml-schema.json | 1 + 1 file changed, 1 insertion(+) create mode 120000 modules/yaml-schema.json diff --git a/modules/yaml-schema.json b/modules/yaml-schema.json new file mode 120000 index 000000000..d84030e9d --- /dev/null +++ b/modules/yaml-schema.json @@ -0,0 +1 @@ +meta-schema.json \ No newline at end of file From 5bf2844db0b53889bb66710ae9c8eba0f8d41db3 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 03:27:22 +0000 Subject: [PATCH 35/41] add last requirements for linting and testing --- modules/nf-scil/betcrop/cropvolume/meta.yml | 6 +++--- pytest.ini | 3 --- tests/config/nextflow.config | 2 +- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/modules/nf-scil/betcrop/cropvolume/meta.yml b/modules/nf-scil/betcrop/cropvolume/meta.yml index 0930e475e..5ff860e51 100644 --- 
a/modules/nf-scil/betcrop/cropvolume/meta.yml +++ b/modules/nf-scil/betcrop/cropvolume/meta.yml @@ -46,9 +46,9 @@ output: pattern: "*.pkl" - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@AlexVCaron" diff --git a/pytest.ini b/pytest.ini index 652bdf8e5..02a8acac0 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,6 +1,3 @@ [pytest] filterwarnings = ignore::pytest.PytestRemovedIn8Warning:_pytest.nodes:140 -testpaths = - tests -python_files = test_*.py diff --git a/tests/config/nextflow.config b/tests/config/nextflow.config index 06f1e00c0..d40d2f932 100644 --- a/tests/config/nextflow.config +++ b/tests/config/nextflow.config @@ -25,7 +25,7 @@ if ("$PROFILE" == "singularity") { } // We need to consider switch to quay for hosting, see what the free plan offers -// docker.registry = 'quay.io' +docker.registry = 'hub.docker.com' // podman.registry = 'quay.io' // singularity.registry = 'quay.io' From 596f12a22e9da7fa7388a216cffcda4a5655dd28 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 03:45:41 +0000 Subject: [PATCH 36/41] last fixes --- .devcontainer/devcontainer.json | 3 +-- sync-nf-core.sh | 8 ++++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index fab32d98e..0f627bb5e 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -59,6 +59,5 @@ } }, "init": true, - "privileged": true, - "runArgs": ["--mount", "type=volume,target=/tmp/"] + "privileged": true } diff --git a/sync-nf-core.sh b/sync-nf-core.sh index 6ecf36ed4..c6c3cd75d 100755 --- a/sync-nf-core.sh +++ b/sync-nf-core.sh @@ -9,6 +9,14 @@ wget -O /workspaces/nf-scil/.editorconfig \ https://github.com/nf-core/tools/raw/$nfcore_ver/.editorconfig wget -O /workspaces/nf-scil/.prettierignore \ https://github.com/nf-core/tools/raw/$nfcore_ver/.prettierignore +echo ".github" >> /workspaces/nf-scil/.prettierignore +echo ".devcontainer" >> /workspaces/nf-scil/.prettierignore +echo ".vscode" >> /workspaces/nf-scil/.prettierignore +echo "venv" >> /workspaces/nf-scil/.prettierignore +echo ".venv" >> /workspaces/nf-scil/.prettierignore +echo ".test_data" >> /workspaces/nf-scil/.prettierignore +echo ".pytest_cache" >> /workspaces/nf-scil/.prettierignore + wget -O /workspaces/nf-scil/.prettierrc.yml \ https://github.com/nf-core/tools/raw/$nfcore_ver/.prettierrc.yml From f5ce5d6e6e08dec117e656b316104fc0c6cc20f8 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 03:47:17 +0000 Subject: [PATCH 37/41] fix docker endpoint --- tests/config/nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/config/nextflow.config b/tests/config/nextflow.config index d40d2f932..bd54912b3 100644 --- a/tests/config/nextflow.config +++ b/tests/config/nextflow.config @@ -25,7 +25,7 @@ if ("$PROFILE" == "singularity") { } // We need to consider switch to quay for hosting, see what the free plan offers -docker.registry = 'hub.docker.com' +docker.registry = 'docker.com' // podman.registry = 'quay.io' // singularity.registry = 'quay.io' From 1a8ab54ecff9c36800d2c85e51c051a619240c1e Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 03:49:38 +0000 Subject: [PATCH 38/41] fix editorconfig --- .devcontainer/devcontainer.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 0f627bb5e..b6f80412a 100644 ---
a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -9,10 +9,10 @@ "ghcr.io/devcontainers/features/github-cli:1": {}, "ghcr.io/devcontainers/features/docker-in-docker:2": {}, "ghcr.io/devcontainers-contrib/features/apt-get-packages:1": {}, - "ghcr.io/robsyme/features/nextflow:1": {}, - "ghcr.io/devcontainers-contrib/features/curl-apt-get:1": {}, - "ghcr.io/devcontainers-contrib/features/tmux-apt-get:1": {}, - "ghcr.io/devcontainers-contrib/features/wget-apt-get:1": {} + "ghcr.io/robsyme/features/nextflow:1": {}, + "ghcr.io/devcontainers-contrib/features/curl-apt-get:1": {}, + "ghcr.io/devcontainers-contrib/features/tmux-apt-get:1": {}, + "ghcr.io/devcontainers-contrib/features/wget-apt-get:1": {} }, "mounts": [ { From 003c998ec6d251f4dd1ef8dda37cdee39e6eeb0a Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 03:52:10 +0000 Subject: [PATCH 39/41] fix docker --- tests/config/nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/config/nextflow.config b/tests/config/nextflow.config index bd54912b3..9da264135 100644 --- a/tests/config/nextflow.config +++ b/tests/config/nextflow.config @@ -25,7 +25,7 @@ if ("$PROFILE" == "singularity") { } // We need to consider switch to quay for hosting, see what the free plan offers -docker.registry = 'docker.com' +docker.registry = 'docker.io' // podman.registry = 'quay.io' // singularity.registry = 'quay.io' From 64264096c655838d7e67d139b28bf20fc3ab256c Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 04:36:37 +0000 Subject: [PATCH 40/41] last fixes --- .github/workflows/test.yml | 2 +- README.md | 2 +- docs/MODULE.md | 7 ------- 3 files changed, 2 insertions(+), 9 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9343d9a70..3cd8f0b74 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,4 +1,4 @@ -name: Run tests +name: Modules Tests on: push: branches: [main] diff --git a/README.md b/README.md index 2acea97bb..f8f828526 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) ![Code Linting](https://github.com/scilus/nf-scil/workflows/Code%20Linting/badge.svg) -![Modules Tests](https://github.com/scilus/nf-scil/workflows/Test%20All%20Modules/badge.svg) +![Modules Tests](https://github.com/scilus/nf-scil/workflows/Modules%20Tests/badge.svg) Welcome to `nf-scil` ! A **Nextflow** modules and workflows repository for neuroimaging maintained by the [SCIL team](https://scil-documentation.readthedocs.io/en/latest/). The diff --git a/docs/MODULE.md b/docs/MODULE.md index 2f4240e95..abcd911a7 100644 --- a/docs/MODULE.md +++ b/docs/MODULE.md @@ -55,11 +55,9 @@ already follow all guidelines. You will find related files in : - Add your inputs in the `input:` section : - > [!NOTE] > Each line below `input:` defines an input channel for the process. A channel can > receive one (`val`, `path`, ...) or more (`tuple`) values per item. - > [!IMPORTANT] > When possible, add all optional input parameters (not data !) to `task.ext` instead of > listing them in the `input:` section (see [this section](#defining-processes-optional-parameters) > for more information). @@ -72,7 +70,6 @@ already follow all guidelines. 
You will find related files in : have to pass it an empty list `[]` for Nextflow to consider its value empty, but correct. - > [!IMPORTANT] > If you decide an input `path` value is optional, add `/* optional, value = [] */` > aside the parameter (e.g. f1 is optional, so `path(f1) /* optional, value = [] */` > or even `tuple val(meta), path(f1) /* optional, value = [] */, path(...` are valid @@ -86,7 +83,6 @@ already follow all guidelines. You will find related files in : The variable `optional_input1` is the one to use in the script. - > [!NOTE] > At its most simple, a variable is `usable` if its conversion to a string is valid > in the script (e.g. : if a variable can be empty or null, then its conversion to an > empty string must be valid in the sense of the script for the variable to be considered @@ -99,11 +95,9 @@ already follow all guidelines. You will find related files in : - File extensions MUST ALWAYS be defined (e.g. `path("*.{nii,nii.gz}")`). - > [!IMPORTANT] > Each line MUST use `emit: ` to make its results available inside Nextflow using > a relevant `name`. Results are accessible using : `PROCESS_NAME.out.`. - > [!NOTE] > Optional outputs ARE possible, add `, optional: true` after the `emit: ` clause. - Fill the `script` section : @@ -125,7 +119,6 @@ already follow all guidelines. You will find related files in : remove the lines in between the `cat` and the `END_VERSIONS` line. In it, add for each dependency a new line in the format : `: `. - > [!NOTE] > You can hard-bake the version as a number here, but if possible extract if from > the dependency dynamically. Refer to the `betcrop/fslbetcrop` module, in `main.nf` > for examples on how to extract the version number correctly. From 4ae564d04751692cdff5d3a741ae67177931a86a Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Thu, 11 Jan 2024 05:23:23 +0000 Subject: [PATCH 41/41] fix python activation (via poetry) in container --- .devcontainer/devcontainer.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index b6f80412a..e8c1c5bff 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -32,7 +32,9 @@ "pythonIndent.keepHangingBracketOnLine": true, "pythonIndent.trimLinesWithOnlyWhitespace": true, "python.createEnvironment.trigger": "off", + "python.terminal.activateEnvironment": true, "python.poetryPath": "/root/.local/bin/poetry", + "python.defaultInterpreterPath": "${workspaceFolder}/.venv", "python.analysis.exclude": [ "${workspaceFolder}/.dvc", "${workspaceFolder}/.pytest_cache",