diff --git a/.dependabot/config.yml b/.dependabot/config.yml
deleted file mode 100644
index a0a1799d10..0000000000
--- a/.dependabot/config.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-version: 2
-updates_configs:
- - package_manager: "java:maven"
- directory: "/"
- update_schedule: "daily"
- target-branch: development
- default_labels: "dependabot"
- commit-message:
- prefix: "[DEPENDABOT]"
-
diff --git a/.generate-reports.py b/.generate-reports.py
deleted file mode 100755
index ead7b28fbc..0000000000
--- a/.generate-reports.py
+++ /dev/null
@@ -1,254 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: UTF-8 -*-
-# fancy comments come at a cost!
-
-# Script to generate a Maven site and push reports to a branch (gh-pages by default).
-# This script assumes that both git and Maven have been installed and that the following environment variables
-# are defined:
-# - REPORTS_GITHUB_ACCESS_TOKEN: GitHub personal access token used to push generated reports
-# - REPORTS_GITHUB_USERNAME: username used to push generated reports
-#
-# Yes, these could be passed as arguments, but Travis log would print them out.
-
-# This script populates the gh-pages branch with the reports generated by running "mvn site".
-# The structure of the generated reports is similar to:
-#
-# (branch gh-pages) # pages_branch option
-# reports # base_output_dir option
-# ├── development # output_dir positional argument
-# │ ├── index.html
-# │ ├── pmd.html
-# │ ├── jacoco.html
-# │ └── ...
-# │
-# ├── 1.0.0 # output_dir positional argument
-# │ ├── index.html
-# │ ├── pmd.html
-# │ ├── jacoco.html
-# │ └── ...
-# │
-# ├── 1.0.1 # output_dir positional argument
-# │ ├── index.html
-# │ ├── pmd.html
-# │ ├── jacoco.html
-# │ └── ...
-# │
-# └── 2.0.0 # output_dir positional argument
-# ├── index.html
-# ├── pmd.html
-# ├── jacoco.html
-# └── ...
-#
-# So only one "development" version of the reports is maintained, while reports for all
-# tagged commits--assumed to be releases--are maintained on the gh-pages branch.
-#
-# The content of each of the folders is whatever Maven generates on the target/site folder.
-
-
-import argparse, os, shutil, subprocess, tempfile, sys, re, traceback
-
-# folder where maven outputs reports generated by running "mvn site"
-MAVEN_SITE_DIR = os.path.join('target', 'site')
-# base directory where reports will be copied to
-BASE_REPORT_DIR = 'reports'
-# credentials are given via environment variables
-TOKEN_ENV_VARIABLE_NAME = 'REPORTS_GITHUB_ACCESS_TOKEN'
-# compiled regex to match files that should not be deleted when cleaning the working folder (in gh-pages)
-UNTOUCHABLE_FILES_MATCHER = re.compile('^\.git.*')
-# regex to validate output folder
-REPORTS_VERSION_REGEX = '^(development|[vV]?\d+\.\d+\.\d+)$'
-
-
-# parses arguments and does the thing
-def main():
- parser = argparse.ArgumentParser(description='QBiC report generator.', prog='.generate-reports.py', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
- parser.add_argument('-s', '--site-dir', default=MAVEN_SITE_DIR,
- help='Directory where Maven reports are found (output of running \'mvn site\').')
- parser.add_argument('-b', '--base-output-dir', default=BASE_REPORT_DIR,
- help='Base directory where the reports will be copied.')
- parser.add_argument('-p', '--pages-branch', default="gh-pages",
- help='Name of the git branch on which the reports will be pushed.')
- parser.add_argument('-a', '--access-token-var-name', default=TOKEN_ENV_VARIABLE_NAME,
- help='Name of the environment variable holding the GitHub personal access token used to push changes in reports.')
- parser.add_argument('-r', '--validation-regex', default=REPORTS_VERSION_REGEX,
- help='Regular expression to validate output_dir; it is assumed that report folders are named after a version.')
- parser.add_argument('--dry-run', action='store_true',
- help='If present, no changes to the remote repository (git commit/push) will be executed.')
- parser.add_argument('--skip-cleanup', action='store_true',
- help='Whether cleanup tasks (removing cloned repos) should be skipped.')
- parser.add_argument('output_dir',
- help='Name of the folder, relative to the base output directory, where reports will be copied to. \
- This folder will be first cleared of its contents before the generated reports are copied. \
- Recommended values are: "development" or a valid release version string (e.g., 1.0.1)')
- parser.add_argument('repo_slug', help='Slug of the repository for which reports are being built.')
- parser.add_argument('commit_message', nargs='+', help='Message(s) to use when committing changes.')
- args = parser.parse_args()
-
- # check that the required environment variables have been defined
- try:
- validateArguments(args)
- except Exception as e:
- print('Error: {}'.format(str(e)), file=sys.stderr)
- exit(1)
-
- # since this will run on Travis, we cannot assume that we can change the current local repo without breaking anything
- # the safest way would be to clone this same repository on a temporary folder and leave the current local repo alone
- working_dir = tempfile.mkdtemp()
- clone_self(working_dir, args)
-
- # reports are available only in a specific branch
- force_checkout_pages_branch(working_dir, args)
-
- # since new branches have a parent commit, we have to remove everything but:
- # * important files (e.g., .git)
- # * the base output directory (args.base_output_dir)
- # otherwise, the newly created gh-pages branch will contain other non-report files!
- # also, it is a good idea to remove everything, since we don't want lingering unused report files
- remove_unneeded_files(working_dir, args)
-
- # move reports to their place
- prepare_report_dir(working_dir, args)
-
- # add, commit, push
- push_to_pages_branch(working_dir, args)
-
- # clean up
- if args.skip_cleanup:
- print('Skipping cleanup of working folder {}'.format(working_dir))
- else:
- print('Removing working folder {}'.format(working_dir))
- shutil.rmtree(working_dir)
-
-
-# Sanity check
-def validateArguments(args):
- # check that the required environment variables are present
- if not args.access_token_var_name in os.environ:
- raise Exception('At least one of the required environment variables is missing. See comments on .generate-reports.py for further information.')
-
- # check if the name of the output_dir matches the regex
- regex = re.compile(args.validation_regex)
- if not regex.match(args.output_dir):
- raise Exception('The provided output directory for the reports, {}, is not valid. It must match the regex {}'.format(args.output_dir, args.validation_regex))
-
- # check that the reports are where they should be (you never know!)
- if not os.path.exists(args.site_dir) or not os.path.isdir(args.site_dir):
- raise Exception('Maven site folder {} does not exist or is not a directory.'.format(args.site_dir))
-
-
-# Clones this repo into the passed working directory; credentials are used because OAuth has a bigger quota
-# plus, we will be pushing changes to gh-pages branch
-def clone_self(working_dir, args, exit_if_fail=True):
- execute(['git', 'clone', 'https://{}:x-oauth-basic@github.com/{}'.format(os.environ[args.access_token_var_name], args.repo_slug), working_dir],
- 'Could not clone {} in directory {}'.format(args.repo_slug, working_dir), exit_if_fail)
-
-
-# Checks out the branch where reports reside (gh-pages)
-def force_checkout_pages_branch(working_dir, args):
- # we need to add the gh-pages branch if it doesn't exist (git checkout -b gh-pages),
- # but if gh-pages already exists, we need to checkout (git checkout gh-pages), luckily,
- # "git checkout branch" fails if branch doesn't exist
- print('Changing to branch {}'.format(args.pages_branch))
- try:
- execute(['git', '-C', working_dir, 'checkout', args.pages_branch], exit_if_fail=False)
- except:
- execute(['git', '-C', working_dir, 'checkout', '-b', args.pages_branch], 'Could not create branch {}'.format(args.pages_branch))
-
-
-# Goes through all files/folders (non-recursively) and deletes them using 'git rm'.
-# Files that should not be deleted are ignored
-def remove_unneeded_files(working_dir, args):
- print('Cleaning local repository ({}) of non-reports files'.format(working_dir))
- for f in os.listdir(working_dir):
- if should_delete(f, args):
- # instead of using OS calls to delete files/folders, use git rm to stage deletions
- print(' Deleting {} from {} branch'.format(f, args.pages_branch))
- execute(['git', '-C', working_dir, 'rm', '-r', '--ignore-unmatch', f], 'Could not remove {}.'.format(f))
- # files that are not part of the repository aren't removed by git and the --ignore-unmatch flag makes
- # git be nice so it doesn't exit with errors, so we need to force-remove them
- force_delete(os.path.join(working_dir, f))
- else:
- print(' Ignoring file/folder {}'.format(f))
-
-
-# Prepares the report output directory, first by clearing it and then by moving the contents of target/site into it
-def prepare_report_dir(working_dir, args):
- report_output_dir = os.path.join(working_dir, args.base_output_dir, args.output_dir)
- if os.path.exists(report_output_dir):
- if not os.path.isdir(report_output_dir):
- print('WARNING: Output destination {} exists and is not a directory.'.format(report_output_dir), file=sys.stderr)
- # remove the object from git
- print('Removing {}'.format(report_output_dir))
- execute(['git', '-C', working_dir, 'rm', '-r', '--ignore-unmatch', os.path.join(args.base_output_dir, args.output_dir)],
- 'Could not remove {}.'.format(report_output_dir))
- # just in case git doesn't remove the file (if it wasn't tracked, for instance), force deletion using OS calls
- force_delete(report_output_dir)
- # we know the output folder doesn't exist, so we can recreate it
- print('Creating {}'.format(report_output_dir))
- os.makedirs(report_output_dir)
-
- # move the contents of the target/site folder (we only need its contents, not the folder itself)
- print('Moving contents of {} to {}'.format(args.site_dir, report_output_dir))
- for f in os.listdir(args.site_dir):
- print(' Moving {}'.format(f))
- shutil.move(os.path.join(args.site_dir, f), report_output_dir)
-
-
-# Adds, commits and pushes changes
-def push_to_pages_branch(working_dir, args):
- if args.dry_run:
- print('(running in dry run mode) Local/remote repository will not be modified')
- else:
- # add changes to the index
- print('Staging changes for commit')
- execute(['git', '-C', working_dir, 'add', '.'], 'Could not stage reports for commit.')
-
- # build the git-commit command and commit changes
- print('Pushing changes upstream')
- git_commit_command = ['git', '-C', working_dir, 'commit']
- for commit_message in args.commit_message:
- git_commit_command.extend(['-m', commit_message])
- execute(git_commit_command, 'Could not commit changes')
-
- # https://www.youtube.com/watch?v=vCadcBR95oU
- execute(['git', '-C', working_dir, 'push', '-u', 'origin', args.pages_branch], 'Could not push changes using provided credentials.')
-
-
-# Whether it is safe to delete the given path; we won't delete important files/folders (such as .git)
-# or the base output directory
-def should_delete(path, args):
- return not UNTOUCHABLE_FILES_MATCHER.match(path) and path != args.base_output_dir
-
-
-# Recursively force-deletes the passed file/folder using OS calls
-def force_delete(file):
- if os.path.exists(file):
- if os.path.isdir(file):
- shutil.rmtree(file)
- else:
- os.remove(file)
-
-
-# Executes an external command
-# stderr/stdout are hidden to avoid leaking credentials into log files in Travis, so it might be a pain in the butt to debug, sorry, but safety first!
-# if exit_if_fail is set to True, this method will print minimal stacktrace information and exit if a failure is encountered, otherwise, an exception
-# will be thrown (this is useful if an error will be handled by the invoking method)
-def execute(command, error_message='Error encountered while executing command', exit_if_fail=True):
- # do not print the command, stderr or stdout! this might expose usernames/passwords/tokens!
- try:
- subprocess.run(command, check=True, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL)
- except:
- if exit_if_fail:
- stack = traceback.extract_stack()
- try:
- print('{}\n Error originated at file {}, line {}'.format(error_message, stack[-2].filename, stack[-2].lineno), file=sys.stderr)
- except:
- print('{}\n No information about the originating call is available.'.format(error_message), file=sys.stderr)
- exit(1)
- else:
- raise Exception()
-
-
-
-if __name__ == "__main__":
- main()
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index d162979ed6..0000000000
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,28 +0,0 @@
----
-name: Bug report
-about: Create a bug report to help us improve
-title: 'Bug Summary'
-labels: 'bug'
-assignees: ''
-
----
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Steps to reproduce the behavior:
-1. ...
-2. ...
-3. ...
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**System [please complete the following information]:**
- - OS: e.g. [Ubuntu 18.04]
- - Language Version: [e.g. Python 3.8]
- - Virtual environment: [e.g. Conda]
-
-**Additional context**
-Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index 3bed8fc2f7..0000000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,20 +0,0 @@
----
-name: Feature request
-about: Suggest a new feature
-title: 'Feature Request Summary'
-labels: 'enhancement'
-assignees: ''
-
----
-
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when ...
-
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
-
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
-
-**Additional context**
-Add any other context or screenshots about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/general_question.md b/.github/ISSUE_TEMPLATE/general_question.md
deleted file mode 100644
index 37c39b39b9..0000000000
--- a/.github/ISSUE_TEMPLATE/general_question.md
+++ /dev/null
@@ -1,13 +0,0 @@
----
-name: General question
-about: Ask a question about anything related to this project
-title: 'Question'
-labels: 'question'
-assignees: ''
-
----
-
-**Question**
-
-Please ask your question here. It can be about the usage of this project, the internals, the implementation or whatever interests you.
-Please use the BUG template for bugs and the FEATURE REQUEST template for feature requests.
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
deleted file mode 100644
index f6fd3ac9a6..0000000000
--- a/.github/pull_request_template.md
+++ /dev/null
@@ -1,19 +0,0 @@
-Many thanks for contributing to this project!
-
-**PR Checklist**
-Please fill in the appropriate checklist below (delete whatever is not relevant). These are the most common things requested on pull requests (PRs).
-
-- [ ] This comment contains a description of changes (with reason)
-- [ ] Referenced issue is linked
-- [ ] If you've fixed a bug or added code that should be tested, add tests!
-- [ ] Documentation in `docs` is updated
-- [ ] `CHANGELOG.rst` is updated
-
-**Description of changes**
-Please state what you've changed and how it might affect the user.
-
-**Technical details**
-Please state any technical details such as limitations, reasons for additional dependencies, benchmarks etc. here.
-
-**Additional context**
-Add any other context or screenshots here.
diff --git a/.github/workflows/build_package.yml b/.github/workflows/build_package.yml
index 9d334b8fa9..d616186cc2 100644
--- a/.github/workflows/build_package.yml
+++ b/.github/workflows/build_package.yml
@@ -1,6 +1,12 @@
name: Build Maven Package
-on: [push]
+on:
+ push:
+ branches:
+ - '**'
+ pull_request:
+ # The branches below must be a subset of the branches above
+ branches: [ main, master ]
jobs:
package:
@@ -19,6 +25,5 @@ jobs:
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
-
- name: Run mvn package
run: mvn -B package --file pom.xml
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index ef963f52a2..d708ad0ff5 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -13,10 +13,10 @@ name: "CodeQL"
on:
push:
- branches: [ master, development, patch/*, release/*, hotfix/* ]
+ branches: [ main, master, development, release/*, hotfix/* ]
pull_request:
# The branches below must be a subset of the branches above
- branches: [ master ]
+ branches: [ main, master ]
schedule:
- cron: '21 1 * * 4'
diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml
index 57d5afed65..ebae605662 100644
--- a/.github/workflows/create-release.yml
+++ b/.github/workflows/create-release.yml
@@ -42,7 +42,7 @@ jobs:
|| contains(github.event.inputs.versionTag, 'rc')) }}
uses: actions/github-script@v4.0.2
with:
- github-token: ${{secrets.GITHUB_TOKEN}}
+ github-token: ${{secrets.JOHNNY_Q5_REPORTS_TOKEN}}
script: |
await github.request(`POST /repos/${{ github.repository }}/releases`, {
tag_name: "${{ github.event.inputs.versionTag }}",
@@ -55,7 +55,7 @@ jobs:
|| contains(github.event.inputs.versionTag, 'rc')) }}
uses: actions/github-script@v4.0.2
with:
- github-token: ${{secrets.GITHUB_TOKEN}}
+ github-token: ${{secrets.JOHNNY_Q5_REPORTS_TOKEN}}
script: |
await github.request(`POST /repos/${{ github.repository }}/releases`, {
tag_name: "${{ github.event.inputs.versionTag }}",
@@ -71,19 +71,33 @@ jobs:
- name: Switch to new branch
run: git checkout -b release/set-version-to-${{ github.event.inputs.versionTag }}
+ - name: Set remote branch
+ run: git push --set-upstream origin release/set-version-to-${{ github.event.inputs.versionTag }}
+
- name: Checkin commit
run: git commit . -m 'Set version to ${{ github.event.inputs.versionTag }}'
- - name: Set remote branch
- run: git push --set-upstream origin release/set-version-to-${{ github.event.inputs.versionTag }}
+ - name: Push to Github
+ run: git push
- name: Open PR with version bump
uses: actions/github-script@v4.0.2
with:
- github-token: ${{secrets.GITHUB_TOKEN}}
+ github-token: ${{secrets.JOHNNY_Q5_REPORTS_TOKEN}}
script: |
await github.request(`POST /repos/${{ github.repository }}/pulls`, {
title: 'Update version to ${{ github.event.inputs.versionTag }}',
head: 'release/set-version-to-${{ github.event.inputs.versionTag }}',
- base: 'main'
+ base: 'master'
+ });
+
+ - name: Open PR to development
+ uses: actions/github-script@v4.0.2
+ with:
+ github-token: ${{secrets.JOHNNY_Q5_REPORTS_TOKEN}}
+ script: |
+ await github.request(`POST /repos/${{ github.repository }}/pulls`, {
+ title: 'Merge release ${{ github.event.inputs.versionTag }} into development',
+ head: 'master',
+ base: 'development'
});
diff --git a/.github/workflows/java_checkstyle.yml b/.github/workflows/java_checkstyle.yml
deleted file mode 100644
index a41426e130..0000000000
--- a/.github/workflows/java_checkstyle.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-name: Java Checkstyle
-
-on: [push]
-
-jobs:
- checkstyle:
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v2
- - name: Set up JDK 1.11
- uses: actions/setup-java@v1
- with:
- java-version: 1.11
-
- - name: Download Checkstyle
- run: wget https://github.com/checkstyle/checkstyle/releases/download/checkstyle-8.31/checkstyle-8.31-all.jar
-
- - name: Download Google style xml
- run: wget https://raw.githubusercontent.com/checkstyle/checkstyle/checkstyle-8.28/src/main/resources/google_checks.xml
-
- - name: Run Checkstyle
- run: java -jar checkstyle-8.31-all.jar -c google_checks.xml .
diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml
index 452314f2b0..5708a25850 100644
--- a/.github/workflows/run_tests.yml
+++ b/.github/workflows/run_tests.yml
@@ -1,6 +1,12 @@
name: Run Maven Tests
-on: [push]
+on:
+ push:
+ branches:
+ - '**'
+ pull_request:
+ # The branches below must be a subset of the branches above
+ branches: [ main, master ]
jobs:
test:
@@ -8,10 +14,10 @@ jobs:
steps:
- uses: actions/checkout@v2
- - name: Set up JDK 1.11
+ - name: Set up JDK 1.8
uses: actions/setup-java@v1
with:
- java-version: 1.11
+ java-version: 1.8
- name: Load local Maven repository cache
uses: actions/cache@v2
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
deleted file mode 100644
index 8993572ecd..0000000000
--- a/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at info@qbic.uni-tuebingen.de. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/README.md b/README.md
index d97d27b359..db8b29084a 100644
--- a/README.md
+++ b/README.md
@@ -101,11 +101,8 @@ Make sure, that you have defined the Github package Maven repository, in order f
A Nanopore NGS measurement output is delivered to us as a nested folder structure, following this model:
-![Nanopore Data Structure Model](./doc/figures/Nanopore_Data_Structure_Model.svg)
+![Nanopore Data Structure Model](./doc/figures/Nanopore_Data_Structure_Model.png)
-A more recent model, which places two of the configuration files into a subfolder and adds the barcode alignment file, is also supported:
-
-![Nanopore Data Structure Model v2](./doc/figures/Nanopore_Data_Structure_Model_v2.svg)
#### Nanopore usage example
diff --git a/doc/figures/ER_diagram_pipeline_results.png b/doc/figures/ER_diagram_pipeline_results.png
index 4ff4b681ff..83f2bfe21c 100644
Binary files a/doc/figures/ER_diagram_pipeline_results.png and b/doc/figures/ER_diagram_pipeline_results.png differ
diff --git a/doc/figures/MaxQuant_Data_Structure.png b/doc/figures/MaxQuant_Data_Structure.png
index 61b1a33813..bf793296ac 100644
Binary files a/doc/figures/MaxQuant_Data_Structure.png and b/doc/figures/MaxQuant_Data_Structure.png differ
diff --git a/doc/figures/Nanopore_Data_Structure_Model.png b/doc/figures/Nanopore_Data_Structure_Model.png
new file mode 100644
index 0000000000..d13f4297dc
Binary files /dev/null and b/doc/figures/Nanopore_Data_Structure_Model.png differ
diff --git a/doc/figures/Nanopore_Data_Structure_Model.svg b/doc/figures/Nanopore_Data_Structure_Model.svg
deleted file mode 100644
index d3ffbbd6fd..0000000000
--- a/doc/figures/Nanopore_Data_Structure_Model.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
\ No newline at end of file
diff --git a/doc/figures/Nanopore_Data_Structure_Model_v2.svg b/doc/figures/Nanopore_Data_Structure_Model_v2.svg
deleted file mode 100644
index 330e266c8b..0000000000
--- a/doc/figures/Nanopore_Data_Structure_Model_v2.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index d6f64bf56f..7476796209 100644
--- a/pom.xml
+++ b/pom.xml
@@ -7,7 +7,7 @@
     <groupId>life.qbic</groupId>
     <artifactId>data-model-lib</artifactId>
-    <version>2.15.0</version>
+    <version>2.25.1</version>
     <name>data-model-lib</name>
     <url>http://github.com/qbicsoftware/data-model-lib</url>
     <description>Data models. A collection of QBiC's central data models and DTOs.</description>
@@ -81,20 +81,20 @@
             <dependency>
                 <groupId>org.codehaus.groovy</groupId>
                 <artifactId>groovy-bom</artifactId>
-                <version>2.5.14</version>
+                <version>3.0.9</version>
                 <type>pom</type>
                 <scope>import</scope>
             </dependency>
-            <dependency>
-                <groupId>org.codehaus.groovy</groupId>
-                <artifactId>groovy-all</artifactId>
-                <version>2.5.14</version>
-                <type>pom</type>
-                <scope>${osgi.scope}</scope>
-            </dependency>
+            <dependency>
+                <groupId>org.codehaus.groovy</groupId>
+                <artifactId>groovy-all</artifactId>
+                <version>3.0.10</version>
+                <type>pom</type>
+                <scope>${osgi.scope}</scope>
+            </dependency>
             <dependency>
                 <groupId>org.osgi</groupId>
                 <artifactId>osgi.core</artifactId>
@@ -111,7 +111,7 @@
             <groupId>life.qbic</groupId>
             <artifactId>xml-manager-lib</artifactId>
-            <version>1.6.0</version>
+            <version>1.7.0</version>
             <scope>${osgi.scope}</scope>
@@ -144,7 +144,7 @@
             <groupId>org.spockframework</groupId>
             <artifactId>spock-core</artifactId>
-            <version>2.0-groovy-2.5</version>
+            <version>2.1-groovy-3.0</version>
             <scope>test</scope>
@@ -171,7 +171,7 @@
                 <artifactId>maven-surefire-plugin</artifactId>
-                <version>2.22.2</version>
+                <version>3.0.0-M5</version>
                     <include>**/*Spec</include>
@@ -181,7 +181,7 @@
                 <groupId>org.codehaus.gmavenplus</groupId>
                 <artifactId>gmavenplus-plugin</artifactId>
-                <version>1.12.1</version>
+                <version>1.13.1</version>
@@ -217,12 +217,12 @@
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-site-plugin</artifactId>
-                <version>3.9.1</version>
+                <version>3.11.0</version>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-project-info-reports-plugin</artifactId>
-                <version>3.1.1</version>
+                <version>3.2.1</version>
                 <groupId>life.qbic</groupId>
@@ -269,7 +269,7 @@
                 <groupId>biz.aQute.bnd</groupId>
                 <artifactId>bnd-maven-plugin</artifactId>
-                <version>5.1.2</version>
+                <version>6.1.0</version>
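Note: the groovy-bom (3.0.9), groovy-all (3.0.10) and spock-core (2.1-groovy-3.0) bumps above belong together, since the groovy-3.0 variant of Spock 2.1 requires a Groovy 3.x runtime. Below is a minimal sketch of a Spock 2 specification that would run against these coordinates; the spec class and its assertion are illustrative and not part of this change. Because the class name ends in "Spec", it is picked up by the "**/*Spec" include configured for maven-surefire-plugin.

```groovy
import spock.lang.Specification

// Needs spock-core 2.1-groovy-3.0 (test scope) and a Groovy 3.x runtime, as declared in the updated pom.xml.
class GroovyVersionSpec extends Specification {

    def "the Groovy 3 runtime is on the test classpath"() {
        expect:
        GroovySystem.version.startsWith("3.")
    }
}
```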
diff --git a/src/main/groovy/life/qbic/datamodel/datasets/MaxQuantRunResult.groovy b/src/main/groovy/life/qbic/datamodel/datasets/MaxQuantRunResult.groovy
index a64a9f924e..bde58328ca 100644
--- a/src/main/groovy/life/qbic/datamodel/datasets/MaxQuantRunResult.groovy
+++ b/src/main/groovy/life/qbic/datamodel/datasets/MaxQuantRunResult.groovy
@@ -29,19 +29,18 @@ final class MaxQuantRunResult {
private final static Set maxQuantFileTypes = [
FQDN_FILES + ".AllPeptides",
FQDN_FILES + ".Evidence",
- FQDN_FILES + ".ExperimentalDesignTemplate",
FQDN_FILES + ".Parameters",
FQDN_FILES + ".Peptides",
FQDN_FILES + ".ProteinGroups",
FQDN_FILES + ".RunParameters",
GENERAL_FILES + ".SampleIds",
- FQDN_FILES + ".Summary"
]
private final AllPeptides allPeptides
private final Evidence evidence
+ @Deprecated
private final ExperimentalDesignTemplate experimentalDesignTemplate
private final Parameters parameters
@@ -54,8 +53,10 @@ final class MaxQuantRunResult {
private final SampleIds sampleIds
+ @Deprecated
private final Summary summary
+ @Deprecated
MaxQuantRunResult(AllPeptides allPeptides, Evidence evidence, ExperimentalDesignTemplate experimentalDesignTemplate, Parameters parameters, Peptides peptides, ProteinGroups proteinGroups, RunParameters runParameters, SampleIds sampleIds, Summary summary) {
this.allPeptides = Objects.requireNonNull(allPeptides, "allPeptides must not be null.")
this.evidence = Objects.requireNonNull(evidence, "evidence must not be null.")
@@ -68,9 +69,19 @@ final class MaxQuantRunResult {
this.summary = Objects.requireNonNull(summary, "summary must not be null.")
}
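+ /**
+  * Creates a MaxQuantRunResult without the deprecated experimentalDesignTemplate and summary
+  * files. All remaining MaxQuant result files must still be provided and non-null.
+  */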
+ MaxQuantRunResult(AllPeptides allPeptides, Evidence evidence, Parameters parameters, Peptides peptides, ProteinGroups proteinGroups, RunParameters runParameters, SampleIds sampleIds) {
+ this.allPeptides = Objects.requireNonNull(allPeptides, "allPeptides must not be null.")
+ this.evidence = Objects.requireNonNull(evidence, "evidence must not be null.")
+ this.parameters = Objects.requireNonNull(parameters, "parameters must not be null.")
+ this.peptides = Objects.requireNonNull(peptides, "peptides must not be null.")
+ this.proteinGroups = Objects.requireNonNull(proteinGroups, "proteinGroups must not be null.")
+ this.runParameters = Objects.requireNonNull(runParameters, "runParameters must not be null.")
+ this.sampleIds = Objects.requireNonNull(sampleIds, "sampleIds must not be null.")
+ }
+
/**
- * Static factory method that creates a new maxQuantRunResult instance from the bioinformatic pipeline output.
- * See this @{link example}
+ * Static factory method that creates a new maxQuantRunResult instance from the MaxQuant output.
+ * See this @{link example}
* for a JSON representation of a valid map structure
*
* @param Map maxQuantRunOutput
@@ -82,27 +93,23 @@ final class MaxQuantRunResult {
//Check if the required folders are in the root directory
Objects.requireNonNull(maxQuantRunOutput.get("allPeptides"), "The provided directory must contain a allPeptides.txt file.")
Objects.requireNonNull(maxQuantRunOutput.get("evidence"), "The provided directory must contain a evidence.txt file.")
- Objects.requireNonNull(maxQuantRunOutput.get("experimentalDesignTemplate"), "The provided directory must contain a experimentalDesignTemplate.txt file.")
Objects.requireNonNull(maxQuantRunOutput.get("parameters"), "The provided directory must contain a parameters.txt file.")
Objects.requireNonNull(maxQuantRunOutput.get("peptides"), "The provided directory must contain a peptides.txt file.")
Objects.requireNonNull(maxQuantRunOutput.get("proteinGroups"), "The provided directory must contain a proteinGroups.txt file.")
Objects.requireNonNull(maxQuantRunOutput.get("runParameters"), "The provided director must contain a runParameters.xml file.")
Objects.requireNonNull(maxQuantRunOutput.get("sampleIds"), "The provided directory must contain a sampleIds.txt file.")
- Objects.requireNonNull(maxQuantRunOutput.get("summary"), "The provided directory must contain a summary.pdf file.")
//Get Files from Root Directory
AllPeptides allPeptides = parseFile(maxQuantRunOutput.get("allPeptides") as Map) as AllPeptides
Evidence evidence = parseFile(maxQuantRunOutput.get("evidence") as Map) as Evidence
- ExperimentalDesignTemplate experimentalDesignTemplate = parseFile(maxQuantRunOutput.get("experimentalDesignTemplate") as Map) as ExperimentalDesignTemplate
Parameters parameters = parseFile(maxQuantRunOutput.get("parameters") as Map) as Parameters
Peptides peptides = parseFile(maxQuantRunOutput.get("peptides") as Map) as Peptides
ProteinGroups proteinGroups = parseFile(maxQuantRunOutput.get("proteinGroups") as Map) as ProteinGroups
RunParameters runParameters = parseFile(maxQuantRunOutput.get("runParameters") as Map) as RunParameters
SampleIds sampleIds = parseFile(maxQuantRunOutput.get("sampleIds") as Map) as SampleIds
- Summary summary = parseFile(maxQuantRunOutput.get("summary") as Map) as Summary
//Create new MaxQuantRunResult object with parsed information
- return new MaxQuantRunResult(allPeptides, evidence, experimentalDesignTemplate, parameters, peptides, proteinGroups, runParameters, sampleIds, summary)
+ return new MaxQuantRunResult(allPeptides, evidence, parameters, peptides, proteinGroups, runParameters, sampleIds)
}
/**
@@ -128,6 +135,7 @@ final class MaxQuantRunResult {
* @return an ExperimentalDesignTemplate file generated by MaxQuant
* @since 2.10.0
*/
+ @Deprecated
ExperimentalDesignTemplate getExperimentalDesignTemplate() {
return experimentalDesignTemplate
}
@@ -182,6 +190,7 @@ final class MaxQuantRunResult {
* @return a Summary file generated by MaxQuant
* @since 2.10.0
*/
+ @Deprecated
Summary getSummary() {
return summary
}
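For illustration, a minimal usage sketch of the relaxed factory method after this change; the file names and per-file map entries are assumptions for the example (the real maps come from the parsed file tree), and only the seven keys checked by createFrom are supplied, so the new seven-argument constructor is used:

```groovy
import life.qbic.datamodel.datasets.MaxQuantRunResult

// Hypothetical run output map; each entry mirrors one node of the parsed file tree.
def maxQuantRunOutput = [
        allPeptides  : [name: "allPeptides.txt", path: "run/allPeptides.txt"],
        evidence     : [name: "evidence.txt", path: "run/evidence.txt"],
        parameters   : [name: "parameters.txt", path: "run/parameters.txt"],
        peptides     : [name: "peptides.txt", path: "run/peptides.txt"],
        proteinGroups: [name: "proteinGroups.txt", path: "run/proteinGroups.txt"],
        runParameters: [name: "runParameters.xml", path: "run/runParameters.xml"],
        sampleIds    : [name: "sample_ids.txt", path: "run/sample_ids.txt"]
]

// summary and experimentalDesignTemplate are no longer required for parsing.
MaxQuantRunResult result = MaxQuantRunResult.createFrom(maxQuantRunOutput)
```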
diff --git a/src/main/groovy/life/qbic/datamodel/datasets/NfCorePipelineResult.groovy b/src/main/groovy/life/qbic/datamodel/datasets/NfCorePipelineResult.groovy
index 7c7e5e126f..65f030dd17 100644
--- a/src/main/groovy/life/qbic/datamodel/datasets/NfCorePipelineResult.groovy
+++ b/src/main/groovy/life/qbic/datamodel/datasets/NfCorePipelineResult.groovy
@@ -3,7 +3,7 @@ package life.qbic.datamodel.datasets
import life.qbic.datamodel.datasets.datastructure.files.DataFile
import life.qbic.datamodel.datasets.datastructure.files.nfcore.ExecutionReport
-import life.qbic.datamodel.datasets.datastructure.files.nfcore.PipelineReport
+
import life.qbic.datamodel.datasets.datastructure.files.nfcore.RunId
import life.qbic.datamodel.datasets.datastructure.files.general.SampleIds
import life.qbic.datamodel.datasets.datastructure.files.nfcore.SoftwareVersions
@@ -33,7 +33,6 @@ final class NfCorePipelineResult {
private final static Set nfCoreFileTypes = [
FQDN_FILES + ".ExecutionReport",
GENERAL_FILES + ".SampleIds",
- FQDN_FILES + ".PipelineReport",
FQDN_FILES + ".SoftwareVersions",
FQDN_FILES + ".RunId"
]
@@ -46,6 +45,7 @@ final class NfCorePipelineResult {
private SampleIds sampleIds
+ // The RunId is only present if the result was generated by an NF-Tower instance
private RunId runId
private PipelineInformationFolder pipelineInformationFolder
@@ -54,7 +54,7 @@ final class NfCorePipelineResult {
private List processFolders
- NfCorePipelineResult(PipelineInformationFolder pipelineInformationFolder, QualityControlFolder qualityControlFolder, List processFolders, RunId runId, SampleIds sampleIds) {
+ NfCorePipelineResult(PipelineInformationFolder pipelineInformationFolder, QualityControlFolder qualityControlFolder, List processFolders, RunId runId, SampleIds sampleIds) {
Objects.requireNonNull(pipelineInformationFolder, "Please provide a PipelineInformation folder.")
Objects.requireNonNull(qualityControlFolder, "Please provide a QualityControl folder")
Objects.requireNonNull(processFolders, "Please provide a List of process folders")
@@ -68,6 +68,17 @@ final class NfCorePipelineResult {
this.sampleIds = sampleIds
}
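+ /**
+  * Creates an NfCorePipelineResult without a RunId, for pipeline runs that were not started
+  * through NF-Tower and therefore ship no run_id.txt in their output.
+  */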
+ NfCorePipelineResult(PipelineInformationFolder pipelineInformationFolder, QualityControlFolder qualityControlFolder, List processFolders, SampleIds sampleIds) {
+ Objects.requireNonNull(pipelineInformationFolder, "Please provide a PipelineInformation folder.")
+ Objects.requireNonNull(qualityControlFolder, "Please provide a QualityControl folder")
+ Objects.requireNonNull(processFolders, "Please provide a List of process folders")
+ Objects.requireNonNull(sampleIds, "Please provide a sampleIds file")
+ this.pipelineInformationFolder = pipelineInformationFolder
+ this.qualityControlFolder = qualityControlFolder
+ this.processFolders = processFolders
+ this.sampleIds = sampleIds
+ }
+
/**
* Static factory method that creates a new nfcoreExperiment instance from the bioinformatic pipeline output.
* See this @{link example}
@@ -81,15 +92,13 @@ final class NfCorePipelineResult {
//Check if all required folders are in root directory
Objects.requireNonNull(bioinformaticPipelineOutput.get("pipelineInformation"), "The root folder must contain a PipelineInformation folder.")
- Objects.requireNonNull(bioinformaticPipelineOutput.get("qualityControl"),"The root folder must contain a QualityControl folder.")
+ Objects.requireNonNull(bioinformaticPipelineOutput.get("qualityControl"), "The root folder must contain a QualityControl folder.")
Objects.requireNonNull(bioinformaticPipelineOutput.get("processFolders"), "The root folder must contain at least one process folder.")
//Check if all required files are in the pipeline_info directory
Map pipelineInfoMap = bioinformaticPipelineOutput["pipelineInformation"] as Map
- Objects.requireNonNull(pipelineInfoMap.get("softwareVersions"), "The pipeline_info folder must contain a softwareVersions.csv file.")
- Objects.requireNonNull(pipelineInfoMap.get("executionReport"), "The pipeline_info folder must contain a executionReport.txt file.")
- Objects.requireNonNull(pipelineInfoMap.get("pipelineReport"), "The pipeline_info folder must contain a pipeline_info.txt file.")
+ Objects.requireNonNull(pipelineInfoMap.get("softwareVersions"), "The pipeline_info folder must contain a softwareVersions.yml file.")
+ Objects.requireNonNull(pipelineInfoMap.get("executionReport"), "The pipeline_info folder must contain a executionReport.html file.")
//Check if all required files are in root directory
- Objects.requireNonNull(bioinformaticPipelineOutput.get("runId"), "The root folder must contain a run_id.txt file.")
Objects.requireNonNull(bioinformaticPipelineOutput.get("sampleIds"), "The root folder must contain an sample_ids.txt file.")
//Parse all folders in the root directory
@@ -105,19 +114,21 @@ final class NfCorePipelineResult {
//These files are not stored as children but as properties of the pipeline_info folder
DataFile softwareVersions = parseFile(pipelineInfoMap.get("softwareVersions") as Map)
DataFile executionReport = parseFile(pipelineInfoMap.get("executionReport") as Map)
- DataFile pipelineReport = parseFile(pipelineInfoMap.get("pipelineReport") as Map)
//Set information of pipelineInformation properties
pipelineInformation.softwareVersions = softwareVersions as SoftwareVersions
- pipelineInformation.pipelineReport = pipelineReport as PipelineReport
pipelineInformation.executionReport = executionReport as ExecutionReport
- //Parse all files in the root directory
- DataFile runId = parseFile(bioinformaticPipelineOutput.get("runId") as Map) as RunId
+ //Parse all mandatory files in the root directory
DataFile sampleIds = parseFile(bioinformaticPipelineOutput.get("sampleIds") as Map) as SampleIds
- //Create new NfCorePipelineResult with parsed information
- return new NfCorePipelineResult(pipelineInformation, qualityControl, processFolders, runId, sampleIds)
+ // Parse optional files in the root directory and create the NfCorePipelineResult accordingly
+ if (bioinformaticPipelineOutput.get("runId") != null) {
+ DataFile runId = parseFile(bioinformaticPipelineOutput.get("runId") as Map) as RunId
+ return new NfCorePipelineResult(pipelineInformation, qualityControl, processFolders, runId, sampleIds)
+ } else {
+ return new NfCorePipelineResult(pipelineInformation, qualityControl, processFolders, sampleIds)
+ }
}
/**
@@ -169,11 +180,12 @@ final class NfCorePipelineResult {
/*
* Helper method that creates a DataFile instance from a map
*/
+
private static DataFile parseFile(Map fileTree) throws IllegalArgumentException {
String name = fileTree.get("name")
String fileType = fileTree.get("fileType")
String path = fileTree.get("path")
-
+
for (String nfCoreFileType : nfCoreFileTypes) {
Class<?> c = Class.forName(nfCoreFileType)
Method method = c.getDeclaredMethod("create", String.class, String.class)
@@ -187,15 +199,15 @@ final class NfCorePipelineResult {
}
}
// We have to check for files of unknown type since this Parser will encounter variable file output dependent on the pipeline
- if(!fileType)
- {
- throw new IllegalArgumentException("File $name with path $path is of unknown nfcore file type.")
+ if (!fileType) {
+ throw new IllegalArgumentException("File $name with path $path is of unknown nfcore file type.")
}
}
/*
* Helper method that creates a DataFolder instance from a map
*/
+
private static DataFolder parseFolder(Map fileTree) throws IllegalArgumentException {
def name = fileTree.get("name") as String
@@ -219,18 +231,19 @@ final class NfCorePipelineResult {
* Helper method that tries to create a DataFolder instance
* based on the DataFolder's different static factory create methods.
*/
+
private static Optional tryToCreateDataFolder(Method method,
String name,
String relativePath,
List children) {
Optional folder = Optional.empty()
- try {
- // We only have named Folders
- def dataFolder = method.invoke(null, name, relativePath, children) as DataFolder
- folder = Optional.of(dataFolder)
- } catch (InvocationTargetException e2) {
- // Do nothing
- }
+ try {
+ // We only have named Folders
+ def dataFolder = method.invoke(null, name, relativePath, children) as DataFolder
+ folder = Optional.of(dataFolder)
+ } catch (InvocationTargetException e2) {
+ // Do nothing
+ }
return folder
}
@@ -238,6 +251,7 @@ final class NfCorePipelineResult {
/*
* Helper method that parses the children of a folder.
*/
+
private static List parseChildren(List