diff --git a/data.js b/data.js index 2b77c21..3396ae9 100644 --- a/data.js +++ b/data.js @@ -1,5 +1,512 @@ var data = [ + { + "config_readme": null, + "data_format": 2, + "description": "Barcode Recovery and Comparison from Database", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/rules/commons.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n\u003cunknown\u003e:1: SyntaxWarning: invalid escape sequence \u0027\\s\u0027\n[DEBUG] In file \"/tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/rules/reports.smk\": Formatted content is different from original\n[DEBUG] \n\u003cunknown\u003e:1: SyntaxWarning: invalid escape sequence \u0027\\q\u0027\n[DEBUG] In file \"/tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/rules/pairwise_alignement.smk\": Formatted content is different from original\n[DEBUG] \n\u003cunknown\u003e:1: SyntaxWarning: invalid escape sequence \u0027\\s\u0027\n[DEBUG] In file \"/tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/rules/extract_barcodes.smk\": Formatted content is different from original\n[INFO] 5 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "CVUA-RRW/BaRCoD", + "latest_release": "1.1.1", + "linting": "WorkflowError in file /tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/Snakefile, line 12:\nWorkflow defines configfile /tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/../.tests/config/config.yaml but it is not present or accessible (full checked path: /tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/.tests/config/config.yaml).\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 0, + "topics": [], + "updated_at": 1726142474.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Find single-stranded DNA breaks with an error-prone DNA polymerase", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp_sbd6gc0/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "NBISweden/Sloppymerase", + "latest_release": null, + "linting": "Lints for rule count_illumina_reads (line 24, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule map_illumina (line 33, /tmp/tmp_sbd6gc0/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule count_nanopore_pacbio_reads (line 49, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule map_nanopore (line 57, /tmp/tmp_sbd6gc0/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule map_pacbio (line 71, /tmp/tmp_sbd6gc0/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule detect_breaks (line 84, /tmp/tmp_sbd6gc0/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule sort_bed (line 106, /tmp/tmp_sbd6gc0/Snakefile):\n * No log 
directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule subtract_illumina_controls (line 116, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule simulate_nickase (line 128, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule random_sites (line 141, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule intersect_nickase_sites (line 153, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule intersect_random_sites (line 162, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule index_bed (line 171, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule index_bam (line 177, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule bgzip_bed (line 182, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 7, + "topics": [], + "updated_at": 1726141738.0 + }, + { + "config_readme": "\u003cp\u003eDescribe how to configure the workflow (using config.yaml and maybe additional files).\nAll of them need to be present with example entries inside of the config folder.\u003c/p\u003e\n", + "data_format": 2, + "description": "create long read (Nanopore with NanoSim) and short read (Illumina with Circle-Map) testing data for ecDNA analysis workflows", + "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmpiw53r2g_/workflow/Snakefile\": Keyword \"params\" at line 56 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[ERROR] In file \"/tmp/tmpiw53r2g_/workflow/Snakefile\": IndexError: pop from empty list\n[INFO] In file \"/tmp/tmpiw53r2g_/workflow/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", + "full_name": "dlaehnemann/create-ecdna-testing-data", + "latest_release": null, + "linting": "Workflow defines that rule get_chromosomes_ref is eligible for caching between workflows (use the --cache argument to enable this).\nLints for snakefile /tmp/tmpiw53r2g_/workflow/Snakefile:\n * Absolute path \"/human_NA12878_DNA_FAB49712_guppy/training\" in line 12:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/human_NA12878_DNA_FAB49712_guppy/training\" in line 16:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Mixed rules and functions in same snakefile.:\n Small one-liner functions used only once should be defined as lambda\n expressions. Other functions should be collected in a common module, e.g.\n \u0027rules/common.smk\u0027. This makes the workflow steps more readable.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/modularization.html#includes\n\nLints for rule config_segment_to_bed (line 65, /tmp/tmpiw53r2g_/workflow/Snakefile):\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. 
Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule create_samples_sheet (line 277, /tmp/tmpiw53r2g_/workflow/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule create_units_sheet (line 305, /tmp/tmpiw53r2g_/workflow/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\n", + "mandatory_flags": { + "desc": null, + "flags": null + }, + "report": true, + "software_stack_deployment": { + "conda": true, + "singularity": false, + "singularity+conda": false + }, + "standardized": true, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726140775.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Just annotate the MAGs as in mg_assembly", + "formatting": null, + "full_name": "3d-omics/mg_annotate", + "latest_release": null, + "linting": "Lints for snakefile /tmp/tmpi0370ikk/workflow/rules/dram.smk:\n * Mixed rules and functions in same snakefile.:\n Small one-liner functions used only once should be defined as lambda\n expressions. Other functions should be collected in a common module, e.g.\n \u0027rules/common.smk\u0027. 
This makes the workflow steps more readable.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/modularization.html#includes\n\nLints for rule checkm2__quality_report__ (line 27, /tmp/tmpi0370ikk/workflow/rules/checkm2.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726134655.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "La idea es crear una p\u00e1gina web con especies de fauna y flora de Gran Canaria que vaya encontrando, vamos a ver como hacemos esto", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp7el7akpu/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "JuanCarlosBio/BiodiversidadGJC", + "latest_release": null, + "linting": "Lints for rule download_images (line 26, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule download_canary_islands_shp (line 41, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule download_extra_layers (line 54, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule download_biota_data (line 71, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule process_canary_islands_shp (line 83, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule process_jardin_botanico_kml (line 99, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule process_biota_data (line 112, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule figures_and_stats (line 146, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule webpage_html (line 165, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726127542.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "A Snakemake pipeline to analyze RNA-seq expression data", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/diffexp.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/quantification.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmp31et65mg/workflow/Snakefile\": Keyword \"input\" at line 44 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/plots.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/splicing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/trim.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/align.smk\": Formatted content is different from original\n[INFO] 7 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "kristinassong/RNAseq", + 
"latest_release": null, + "linting": "Lints for rule star_index (line 1, /tmp/tmp31et65mg/workflow/rules/align.smk):\n * Param idx is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule star_align (line 27, /tmp/tmp31et65mg/workflow/rules/align.smk):\n * Param out_prefix is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param idx is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule genomecov (line 97, /tmp/tmp31et65mg/workflow/rules/align.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule kallisto_quant (line 38, /tmp/tmp31et65mg/workflow/rules/quantification.smk):\n * Param outdir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule tx2gene (line 67, /tmp/tmp31et65mg/workflow/rules/quantification.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule merge_kallisto_quant (line 80, /tmp/tmp31et65mg/workflow/rules/quantification.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule deseq2 (line 3, /tmp/tmp31et65mg/workflow/rules/diffexp.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Param kallisto_dir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param out_dir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule volcano_plot (line 22, /tmp/tmp31et65mg/workflow/rules/diffexp.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule pca (line 1, /tmp/tmp31et65mg/workflow/rules/plots.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule multiqc (line 17, /tmp/tmp31et65mg/workflow/rules/plots.smk):\n * Param outdir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. 
Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule rmats (line 1, /tmp/tmp31et65mg/workflow/rules/splicing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule filter_rmats (line 23, /tmp/tmp31et65mg/workflow/rules/splicing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Param dir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule rmats_paired_env (line 46, /tmp/tmp31et65mg/workflow/rules/splicing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule rmats_paired (line 62, /tmp/tmp31et65mg/workflow/rules/splicing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule filter_rmats_paired (line 85, /tmp/tmp31et65mg/workflow/rules/splicing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Param dir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. 
Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [ + "rna-seq-pipeline", + "snakemake-workflow" + ], + "updated_at": 1726112847.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Workflow for processing UAS imagery into data on bird location and species for near real-time monitoring in the Everglades", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpopwg9ani/weecology-everwatch-workflow-2e6fceb/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "weecology/everwatch-workflow", + "latest_release": "v0.2.0", + "linting": "WorkflowError in file /tmp/tmpopwg9ani/weecology-everwatch-workflow-2e6fceb/Snakefile, line 5:\nWorkflow defines configfile /blue/ewhite/everglades/everwatch-workflow/snakemake_config.yml but it is not present or accessible (full checked path: /blue/ewhite/everglades/everwatch-workflow/snakemake_config.yml).\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 4, + "topics": [], + "updated_at": 1726093707.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[ERROR] In file \"/tmp/tmpb_4sx3jl/workflow/Snakefile\": InvalidPython: Black error:\n```\nCannot parse: 75:0: EOF in multi-line statement\n```\n\n[INFO] In file \"/tmp/tmpb_4sx3jl/workflow/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", + "full_name": "SoftLivingMatter/5eU-seq-pipelines", + "latest_release": null, + "linting": "Creating specified working directory /your/directory/here.\nPermissionError in file /tmp/tmpb_4sx3jl/workflow/Snakefile, line 7:\n[Errno 13] Permission denied: \u0027/your\u0027\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1315, in mkdir\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1315, in mkdir\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1311, in mkdir\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 0, + "topics": [], + "updated_at": 1726177806.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Workflow to identify the centromere dip region (CDR) from methyl BAMs", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpumxp1g0a/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[INFO] 1 file(s) would be changed \ud83d\ude2c\n[INFO] 1 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", + "full_name": "koisland/CDR-Finder", + "latest_release": null, + "linting": null, + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 
1726071971.0 + }, + { + "config_readme": "\u003cp\u003eDescribe how to configure the workflow (using config.yaml and maybe additional files).\nAll of them need to be present with example entries inside of the config folder.\u003c/p\u003e\n", + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpbyphbz3g/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "vkpat/short-read-mapping-and-QC-snakemake-pipeline", + "latest_release": null, + "linting": "WorkflowError in file /tmp/tmpbyphbz3g/workflow/Snakefile, line 1:\nWorkflow defines configfile config.yaml but it is not present or accessible (full checked path: /tmp/tmpbyphbz3g/config.yaml).\n", + "mandatory_flags": { + "desc": null, + "flags": null + }, + "report": true, + "software_stack_deployment": { + "conda": true, + "singularity": false, + "singularity+conda": false + }, + "standardized": true, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726070051.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmptmii62qa/rules/renv.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmptmii62qa/Snakefile\": Formatted content is different from original\n[INFO] 2 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "deer-marketing-lab/dsms-lecture-ugc-ratings", + "latest_release": null, + "linting": "Lints for rule build_html (line 16, /tmp/tmptmii62qa/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable run_r from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable log_all from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule build_pdf (line 29, /tmp/tmptmii62qa/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable run_r from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. 
Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable log_all from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule clean (line 48, /tmp/tmptmii62qa/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule renv_install (line 10, /tmp/tmptmii62qa/rules/renv.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable runR from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule renv_consent (line 15, /tmp/tmptmii62qa/rules/renv.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule renv_init (line 20, /tmp/tmptmii62qa/rules/renv.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule renv_snap (line 25, /tmp/tmptmii62qa/rules/renv.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule renv_restore (line 30, /tmp/tmptmii62qa/rules/renv.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 0, + "topics": [], + "updated_at": 1726069627.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "[INSERM U981] WGS\u0026WES Pipelines", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmppavg8cvv/workflow/rules/init_pipeline.smk\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmppavg8cvv/workflow/Snakefile\": InvalidPython: Black error:\n```\nCannot parse: 62:0: \u003cline number missing in source\u003e\n```\n\n[INFO] In file \"/tmp/tmppavg8cvv/workflow/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n[INFO] In file \"/tmp/tmppavg8cvv/workflow/Snakefile\": 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "jinxin-wang/Genome_Sequencing_Analysis", + "latest_release": null, + "linting": "FileNotFoundError in file /tmp/tmppavg8cvv/workflow/rules/init_pipeline.smk, line 118:\n[Errno 2] No such file or directory: \u0027config/variant_call_list_TvN.tsv\u0027\n File 
\"/tmp/tmppavg8cvv/workflow/rules/init_pipeline.smk\", line 118, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 2, + "topics": [], + "updated_at": 1726062684.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n\u003cunknown\u003e:14: SyntaxWarning: invalid escape sequence \u0027\\-\u0027\n[DEBUG] In file \"/tmp/tmpdyuhxgko/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "fredjarlier/ProjectCNN", + "latest_release": null, + "linting": "/tmp/tmpdyuhxgko/Snakefile:78: SyntaxWarning: invalid escape sequence \u0027\\-\u0027\n \"{yes}.bam\"\n/tmp/tmpdyuhxgko/Snakefile:85: SyntaxWarning: invalid escape sequence \u0027\\-\u0027\n input:\nKeyError in file /tmp/tmpdyuhxgko/Snakefile, line 31:\n\u0027lanes\u0027\n File \"/tmp/tmpdyuhxgko/Snakefile\", line 31, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 2, + "topics": [], + "updated_at": 1726058223.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Mu-seq Workflow Utility ", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/insertion_identification.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/Snakefile\": Keyword \"input\" at line 93 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/Snakefile\": Keyword \"input\" at line 129 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/Snakefile\": Keyword \"input\" at line 168 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/Snakefile\": Keyword \"input\" at line 200 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/ref.smk\": InvalidPython: Black error:\n```\nCannot parse: 18:4: run:\n```\n\n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/ref.smk\": \n[ERROR] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/trimming.smk\": InvalidPython: Black error:\n```\nCannot parse: 45:4: log:\n```\n\n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/trimming.smk\": \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/common_utils.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/quality_control.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In 
file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/insertion_annotation.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/ref_utils.smk\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk\": InvalidPython: Black error:\n```\nCannot parse: 14:7: output:\n```\n\n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk\": \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/indexes_bam2sam.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/duplicate_removal.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/mapping.smk\": Inline-formatted keyword \"threads\" at line 80 had its comments relocated above it.\nPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/mapping.smk\": Formatted content is different from original\n[INFO] 3 file(s) raised parsing errors \ud83e\udd15\n[INFO] 10 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "tgstoecker/MuWU", + "latest_release": "v1.5.0", + "linting": "/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/ref.smk:129: SyntaxWarning: invalid escape sequence \u0027\\.\u0027\n \"logs/annotation_table/annotation_table.log\"\n/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/ref.smk:142: SyntaxWarning: invalid escape sequence \u0027\\.\u0027\n \".1.bt2\", \".2.bt2\", \".3.bt2\", \".4.bt2\", \".rev.1.bt2\", \".rev.2.bt2\",\n/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk:45: SyntaxWarning: invalid escape sequence \u0027\\s\u0027\n sed -i \u00271i Name\\\\tStrand\\\\tType\u0027 {output}\n/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk:53: SyntaxWarning: invalid escape sequence \u0027\\s\u0027\n read_typing=expand(\"results/te_typing/pre_sorting/{sample}/{sample}_te_types_merged.tsv\", sample=SAMPLES),\n/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk:510: SyntaxWarning: invalid escape sequence \u0027\\(\u0027\n/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk:519: SyntaxWarning: invalid escape sequence \u0027\\(\u0027\nLints for rule read_te_typing (line 8, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule merging_read_te_typing (line 34, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule te_typing_annotation (line 51, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule te_typing_annotation_propagation_GRID (line 79, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule get_uncategorized_ins_reads_1 (line 134, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule get_uncategorized_ins_reads_2 (line 151, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule merge_uncategorized_ins_reads (line 167, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule index_categorized_ins_reads (line 184, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule locate_motif (line 197, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule extend_motif (line 217, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule extract_unc_ins_motif_subregions (line 232, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule rename_unc_ins_motif_subregions (line 250, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule concat_motif_subregions_files (line 263, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule clustering_unc_reads_final_set (line 276, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule reformat_clustering_results_1 (line 293, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule reformat_clustering_results_2 (line 306, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule final_cluster_sizes (line 323, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726050889.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmp2dl8hrsu/Snakefile\": Keyword \"shell\" at line 260 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp2dl8hrsu/Snakefile\": Keyword \"shell\" at line 548 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmp2dl8hrsu/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "HealthML/MFD", + "latest_release": null, + "linting": "WorkflowError in file /tmp/tmp2dl8hrsu/Snakefile, line 1:\nWorkflow defines configfile src/config.yaml but it is not present or accessible (full checked path: /tmp/tmp2dl8hrsu/src/config.yaml).\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 4, + "topics": [], + "updated_at": 1726044610.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[ERROR] In file \"/tmp/tmpqoiceb5e/Snakefile\": InvalidPython: Black error:\n```\nCannot parse: 6:8: storage:\n```\n\n[INFO] In file \"/tmp/tmpqoiceb5e/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", + "full_name": "dakesan/snakemake-s3-minimum-example", + "latest_release": null, + "linting": "InvalidPluginException in file /tmp/tmpqoiceb5e/Snakefile, line 6:\nError loading Snakemake plugin s3: The package snakemake-storage-plugin-s3 is not installed.\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726027343.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmp93c8a3i2/workflow/Snakefile\": Keyword \"output\" at line 39 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp93c8a3i2/workflow/Snakefile\": Keyword \"params\" at line 53 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file 
\"/tmp/tmp93c8a3i2/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "MetaSUB-CAMP/camp_mag-qc", + "latest_release": null, + "linting": "KeyError in file /tmp/tmp93c8a3i2/workflow/Snakefile, line 13:\n\u0027work_dir\u0027\n File \"/tmp/tmp93c8a3i2/workflow/Snakefile\", line 13, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726024734.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmpij19n6zn/workflow/Snakefile\": Keyword \"input\" at line 38 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[ERROR] In file \"/tmp/tmpij19n6zn/workflow/Snakefile\": NoParametersError: L189: In output definition.\n[INFO] In file \"/tmp/tmpij19n6zn/workflow/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", + "full_name": "palomnyk/NHANES_Complexity", + "latest_release": null, + "linting": "SyntaxError in file /tmp/tmpij19n6zn/workflow/Snakefile, line 104:\nCommand must be given as string after the shell keyword.:\nNone\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726014098.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/clustering.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/gliph.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/plotting.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/conga.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/seurat_preprocessing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/tcr_dist.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/differential_expression.smk\": Formatted content is different from original\n[INFO] 8 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "carpenter-lab/single-cell-workflow", + "latest_release": null, + "linting": "\nEidoValidationError in file /tmp/tmp5xd23uhl/workflow/Snakefile, line 13:\nEidoValidationError (Validation failed): {\"\u0027_samples\u0027 is a required property\": [{\u0027type\u0027: \"\u0027_samples\u0027 is a required property\", \u0027message\u0027: \"\u0027_samples\u0027 is a required property on instance project\", \u0027sample_name\u0027: \u0027project\u0027}]}\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/eido/validation.py\", line 73, in validate_project\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/eido/validation.py\", line 56, in _validate_object\n", + 
"mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1725999338.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "A thin wrapper to simplify running cellprofiler on della", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpsxp2ehq9/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "SoftLivingMatter/snakemake-cellprofiler", + "latest_release": null, + "linting": "Creating specified working directory /path/to/output/directory.\nPermissionError in file /tmp/tmpsxp2ehq9/workflow/Snakefile, line 7:\n[Errno 13] Permission denied: \u0027/path\u0027\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1315, in mkdir\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1315, in mkdir\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1315, in mkdir\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1311, in mkdir\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 0, + "topics": [], + "updated_at": 1726177733.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/searchgui.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/raw_file_parse.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/generate_db.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/peptide_shaker.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/post_processing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/gff_generation.smk\": Formatted content is different from original\n[INFO] 7 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "PRIDE-reanalysis/MetaPUF", + "latest_release": null, + "linting": "NotImplementedError in file /tmp/tmp5yqp4tmy/workflow/rules/raw_file_parse.smk, line 10:\nRemote providers have been replaced by Snakemake storage plugins. 
Please use the corresponding storage plugin instead (snakemake-storage-plugin-*).\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 2, + "topics": [], + "updated_at": 1725982636.0 + }, + { + "config_readme": "\u003cp\u003eDescribe how to configure the workflow (using config.yaml and maybe additional files).\nAll of them need to be present with example entries inside of the config folder.\u003c/p\u003e\n", + "data_format": 2, + "description": "Generating consistent TC and TC Surge hazard set event using Snakemake and CLIMADA", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpybi507vm/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "spjuhel/Climada_TC_TCSurge_Generation", + "latest_release": null, + "linting": "KeyError in file /tmp/tmpybi507vm/workflow/Snakefile, line 27:\n\u0027start\u0027\n File \"/tmp/tmpybi507vm/workflow/Snakefile\", line 27, in \u003cmodule\u003e\n", + "mandatory_flags": { + "desc": null, + "flags": null + }, + "report": true, + "software_stack_deployment": { + "conda": true, + "singularity": false, + "singularity+conda": false + }, + "standardized": true, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1725981203.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmp_8rvuhe_/workflow/rules/phylogeny.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] \n[INFO] 1 file(s) would be changed \ud83d\ude2c\n[INFO] 4 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", + "full_name": "koisland/CenClustering", + "latest_release": null, + "linting": "Multiple includes of /tmp/tmp_8rvuhe_/workflow/rules/data.smk ignored\nLints for snakefile https://:ghs_9K7szIWhrwDYcwcKyxn0stCzzoXvKe0s9Fdn@raw.githubusercontent.com/koisland/asm-to-reference-alignment/minimal/workflow/Snakefile:\n * Absolute path \"/tmp\" in line 13:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n\nLints for snakefile /tmp/tmp_8rvuhe_/workflow/rules/phylogeny.smk:\n * Deprecated singularity directive used for container definition in line 25.:\n Use the container directive instead (it is agnostic of the underlying\n container runtime).\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule wget (line 1, /tmp/tmp_8rvuhe_/workflow/rules/utils.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule get_chm13_asm (line 6, /tmp/tmp_8rvuhe_/workflow/rules/data.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule get_chimp_asm (line 28, /tmp/tmp_8rvuhe_/workflow/rules/data.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1725980621.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Snakemake pipeline for parsing simulation outputs across scaling factors", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpkpgy83jb/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "SchriderLab/simscale-snakemake", + "latest_release": null, + "linting": "TypeError in file /tmp/tmpkpgy83jb/workflow/Snakefile, line 11:\n\u0027NoneType\u0027 object is not subscriptable\n File \"/tmp/tmpkpgy83jb/workflow/Snakefile\", line 11, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1725894756.0 + }, + { + "config_readme": "\u003cp\u003eDescribe how to configure the workflow (using config.yaml and maybe additional files).\nAll of them need to be present with example entries inside of the config folder.\u003c/p\u003e\n", + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpiobvfk4r/workflow/rules/toblow5.smk\": Formatted content is different from original\n[DEBUG] \n\u003cunknown\u003e:1: 
SyntaxWarning: invalid escape sequence \u0027\\d\u0027\n[DEBUG] In file \"/tmp/tmpiobvfk4r/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpiobvfk4r/workflow/rules/fetch_data.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpiobvfk4r/workflow/rules/common.smk\": Formatted content is different from original\n[INFO] 4 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "loganylchen/prep_public_nanoporeDRS_data", + "latest_release": null, + "linting": "/tmp/tmpiobvfk4r/workflow/Snakefile:9: SyntaxWarning: invalid escape sequence \u0027\\d\u0027\n sample=\"[\\da-zA-Z]+\"\nFileNotFoundError in file /tmp/tmpiobvfk4r/workflow/rules/common.smk, line 6:\n[Errno 2] No such file or directory: \u0027config/samples.tsv\u0027\n File \"/tmp/tmpiobvfk4r/workflow/rules/common.smk\", line 6, in \u003cmodule\u003e\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1026, in read_csv\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 620, in _read\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1620, in __init__\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1880, in _make_engine\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/common.py\", line 873, in get_handle\n", + "mandatory_flags": { + "desc": null, + "flags": null + }, + "report": true, + "software_stack_deployment": { + "conda": true, + "singularity": false, + "singularity+conda": false + }, + "standardized": true, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1725894100.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpy9boas6k/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "daiuc/splice-pub", + "latest_release": null, + "linting": "Lints for snakefile /tmp/tmpy9boas6k/Snakefile:\n * Absolute path \"/project/yangili1/cdai/SpliFi\" in line 41:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/project/yangili1/cdai/splice-pub/smk-plots/gtex-sqtl-enrichment-v4\" in line 42:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/project/yangili1/cdai/splice-pub/data/WGS_Feature_overlap_collapsed_VEP_short_4torus.MAF01.txt.gz\" in line 44:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/project/yangili1/cdai/SpliFi\" in line 58:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/project/yangili1/cdai/splice-pub/smk-plots/gtex-sqtl-enrichment-v4-C\" in line 58:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/project/yangili1/cdai/splice-pub/data/WGS_Feature_overlap_collapsed_VEP_short_4torus.MAF01.txt.gz\" in line 60:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Path composition with \u0027+\u0027 in line 15:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n\nLints for rule plotGTExsQTLEnrichment (line 33, /tmp/tmpy9boas6k/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule plotGTExsQTLEnrichment_test (line 76, /tmp/tmpy9boas6k/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule prepTorusInputData (line 63, /tmp/tmpy9boas6k/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param outprefix is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule RunTorusEnrich (line 85, /tmp/tmpy9boas6k/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1725892780.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[ERROR] In file \"/tmp/tmpl4vsaazr/Snakefile\": SyntaxError: L107: Unrecognised keyword \u0027savedir\u0027 in rule definition\n[INFO] In file \"/tmp/tmpl4vsaazr/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", + "full_name": "ML4GW/gwak", + "latest_release": null, + "linting": "Lints for snakefile /tmp/tmpl4vsaazr/Snakefile:\n * Absolute path \"/home/katya.govorkova/gwak/{version}/data/{dataclass}.npz\" in line 87:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n\nLints for rule find_valid_segments (line 31, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule run_omicron (line 40, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule fetch_site_data (line 52, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule generate_data (line 64, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable PERIOD from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule upload_data (line 81, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule validate_data (line 92, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable VERSION from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule train_quak (line 101, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule generate_timeslides_for_far (line 113, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule evaluate_signals (line 137, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule generate_timeslides_for_fm (line 150, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule train_final_metric (line 173, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule recreation_and_quak_plots (line 192, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule compute_far (line 208, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule merge_far_hist (line 232, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule quak_plotting_prediction_and_recreation (line 242, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule plot_results (line 257, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param evaluation_dir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule make_pipeline_plot (line 272, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 4, + "topics": [], + "updated_at": 1725890954.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpijdr3wdn/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "iedun/XDEM-Project", + "latest_release": null, + "linting": "RuleException in file /tmp/tmpijdr3wdn/Snakefile, line 32:\nOnly input files can be specified as functions\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 0, + "subscribers_count": 1, + "topics": [], + "updated_at": 1725890597.0 + }, { "config_readme": null, "data_format": 2, @@ -4365,309 +4872,6 @@ var data = "topics": [], "updated_at": 1725307570.0 }, - { - "config_readme": null, - "data_format": 2, - "description": "[INSERM U981] WGS\u0026WES Pipelines", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmppavg8cvv/workflow/rules/init_pipeline.smk\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmppavg8cvv/workflow/Snakefile\": InvalidPython: Black error:\n```\nCannot parse: 62:0: \u003cline number missing in source\u003e\n```\n\n[INFO] In file \"/tmp/tmppavg8cvv/workflow/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n[INFO] In file \"/tmp/tmppavg8cvv/workflow/Snakefile\": 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "jinxin-wang/Genome_Sequencing_Analysis", - "latest_release": null, - "linting": "FileNotFoundError in file /tmp/tmppavg8cvv/workflow/rules/init_pipeline.smk, line 118:\n[Errno 2] No such file or directory: \u0027config/variant_call_list_TvN.tsv\u0027\n File \"/tmp/tmppavg8cvv/workflow/rules/init_pipeline.smk\", line 118, in \u003cmodule\u003e\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 2, - "topics": [], - "updated_at": 1726062684.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n\u003cunknown\u003e:14: SyntaxWarning: invalid escape sequence \u0027\\-\u0027\n[DEBUG] In file \"/tmp/tmpdyuhxgko/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "fredjarlier/ProjectCNN", - "latest_release": null, - "linting": "/tmp/tmpdyuhxgko/Snakefile:78: SyntaxWarning: invalid escape sequence \u0027\\-\u0027\n \"{yes}.bam\"\n/tmp/tmpdyuhxgko/Snakefile:85: SyntaxWarning: invalid escape sequence \u0027\\-\u0027\n input:\nKeyError in file 
/tmp/tmpdyuhxgko/Snakefile, line 31:\n\u0027lanes\u0027\n File \"/tmp/tmpdyuhxgko/Snakefile\", line 31, in \u003cmodule\u003e\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 2, - "topics": [], - "updated_at": 1726058223.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "Mu-seq Workflow Utility ", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/insertion_identification.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/Snakefile\": Keyword \"input\" at line 93 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/Snakefile\": Keyword \"input\" at line 129 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/Snakefile\": Keyword \"input\" at line 168 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/Snakefile\": Keyword \"input\" at line 200 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/ref.smk\": InvalidPython: Black error:\n```\nCannot parse: 18:4: run:\n```\n\n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/ref.smk\": \n[ERROR] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/trimming.smk\": InvalidPython: Black error:\n```\nCannot parse: 45:4: log:\n```\n\n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/trimming.smk\": \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/common_utils.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/quality_control.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/insertion_annotation.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/ref_utils.smk\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk\": InvalidPython: Black error:\n```\nCannot parse: 14:7: output:\n```\n\n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk\": \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/indexes_bam2sam.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/duplicate_removal.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/common.smk\": 
Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/mapping.smk\": Inline-formatted keyword \"threads\" at line 80 had its comments relocated above it.\nPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/mapping.smk\": Formatted content is different from original\n[INFO] 3 file(s) raised parsing errors \ud83e\udd15\n[INFO] 10 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "tgstoecker/MuWU", - "latest_release": "v1.5.0", - "linting": "/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/ref.smk:129: SyntaxWarning: invalid escape sequence \u0027\\.\u0027\n \"logs/annotation_table/annotation_table.log\"\n/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/ref.smk:142: SyntaxWarning: invalid escape sequence \u0027\\.\u0027\n \".1.bt2\", \".2.bt2\", \".3.bt2\", \".4.bt2\", \".rev.1.bt2\", \".rev.2.bt2\",\n/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk:45: SyntaxWarning: invalid escape sequence \u0027\\s\u0027\n sed -i \u00271i Name\\\\tStrand\\\\tType\u0027 {output}\n/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk:53: SyntaxWarning: invalid escape sequence \u0027\\s\u0027\n read_typing=expand(\"results/te_typing/pre_sorting/{sample}/{sample}_te_types_merged.tsv\", sample=SAMPLES),\n/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk:510: SyntaxWarning: invalid escape sequence \u0027\\(\u0027\n/tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk:519: SyntaxWarning: invalid escape sequence \u0027\\(\u0027\nLints for rule read_te_typing (line 8, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule merging_read_te_typing (line 34, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule te_typing_annotation (line 51, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule te_typing_annotation_propagation_GRID (line 79, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule get_uncategorized_ins_reads_1 (line 134, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule get_uncategorized_ins_reads_2 (line 151, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule merge_uncategorized_ins_reads (line 167, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule index_categorized_ins_reads (line 184, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule locate_motif (line 197, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule extend_motif (line 217, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule extract_unc_ins_motif_subregions (line 232, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule rename_unc_ins_motif_subregions (line 250, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule concat_motif_subregions_files (line 263, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule clustering_unc_reads_final_set (line 276, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule reformat_clustering_results_1 (line 293, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule reformat_clustering_results_2 (line 306, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule final_cluster_sizes (line 323, /tmp/tmpjsaaylsc/tgstoecker-MuWU-cbf7dbb/workflow/rules/te_typing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726050889.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmp2dl8hrsu/Snakefile\": Keyword \"shell\" at line 260 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp2dl8hrsu/Snakefile\": Keyword \"shell\" at line 548 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmp2dl8hrsu/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "HealthML/MFD", - "latest_release": null, - "linting": "WorkflowError in file /tmp/tmp2dl8hrsu/Snakefile, line 1:\nWorkflow defines configfile src/config.yaml but it is not present or accessible (full checked path: /tmp/tmp2dl8hrsu/src/config.yaml).\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 4, - "topics": [], - "updated_at": 1726044610.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[ERROR] In file \"/tmp/tmpqoiceb5e/Snakefile\": InvalidPython: Black error:\n```\nCannot parse: 6:8: storage:\n```\n\n[INFO] In file \"/tmp/tmpqoiceb5e/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", - "full_name": "dakesan/snakemake-s3-minimum-example", - "latest_release": null, - "linting": "InvalidPluginException in file /tmp/tmpqoiceb5e/Snakefile, line 6:\nError loading Snakemake plugin s3: The package snakemake-storage-plugin-s3 is not installed.\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726027343.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmp93c8a3i2/workflow/Snakefile\": Keyword \"output\" at line 39 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp93c8a3i2/workflow/Snakefile\": Keyword \"params\" at line 53 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmp93c8a3i2/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "MetaSUB-CAMP/camp_mag-qc", - "latest_release": null, - "linting": "KeyError in file /tmp/tmp93c8a3i2/workflow/Snakefile, line 13:\n\u0027work_dir\u0027\n File \"/tmp/tmp93c8a3i2/workflow/Snakefile\", line 13, in \u003cmodule\u003e\n", - "mandatory_flags": [], - "report": false, - 
"software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726024734.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmpij19n6zn/workflow/Snakefile\": Keyword \"input\" at line 38 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[ERROR] In file \"/tmp/tmpij19n6zn/workflow/Snakefile\": NoParametersError: L189: In output definition.\n[INFO] In file \"/tmp/tmpij19n6zn/workflow/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", - "full_name": "palomnyk/NHANES_Complexity", - "latest_release": null, - "linting": "SyntaxError in file /tmp/tmpij19n6zn/workflow/Snakefile, line 104:\nCommand must be given as string after the shell keyword.:\nNone\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726014098.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/clustering.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/gliph.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/plotting.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/conga.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/seurat_preprocessing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/tcr_dist.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5xd23uhl/workflow/rules/differential_expression.smk\": Formatted content is different from original\n[INFO] 8 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "carpenter-lab/single-cell-workflow", - "latest_release": null, - "linting": "\nEidoValidationError in file /tmp/tmp5xd23uhl/workflow/Snakefile, line 13:\nEidoValidationError (Validation failed): {\"\u0027_samples\u0027 is a required property\": [{\u0027type\u0027: \"\u0027_samples\u0027 is a required property\", \u0027message\u0027: \"\u0027_samples\u0027 is a required property on instance project\", \u0027sample_name\u0027: \u0027project\u0027}]}\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/eido/validation.py\", line 73, in validate_project\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/eido/validation.py\", line 56, in _validate_object\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1725999338.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "A thin wrapper to simplify running cellprofiler on della", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpsxp2ehq9/workflow/Snakefile\": Formatted content is different from 
original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "SoftLivingMatter/snakemake-cellprofiler", - "latest_release": null, - "linting": "Creating specified working directory /path/to/output/directory.\nPermissionError in file /tmp/tmpsxp2ehq9/workflow/Snakefile, line 7:\n[Errno 13] Permission denied: \u0027/path\u0027\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1315, in mkdir\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1315, in mkdir\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1315, in mkdir\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1311, in mkdir\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 0, - "topics": [], - "updated_at": 1726177733.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/searchgui.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/raw_file_parse.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/generate_db.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/peptide_shaker.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/post_processing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp5yqp4tmy/workflow/rules/gff_generation.smk\": Formatted content is different from original\n[INFO] 7 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "PRIDE-reanalysis/MetaPUF", - "latest_release": null, - "linting": "NotImplementedError in file /tmp/tmp5yqp4tmy/workflow/rules/raw_file_parse.smk, line 10:\nRemote providers have been replaced by Snakemake storage plugins. 
Please use the corresponding storage plugin instead (snakemake-storage-plugin-*).\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 2, - "topics": [], - "updated_at": 1725982636.0 - }, - { - "config_readme": "\u003cp\u003eDescribe how to configure the workflow (using config.yaml and maybe additional files).\nAll of them need to be present with example entries inside of the config folder.\u003c/p\u003e\n", - "data_format": 2, - "description": "Generating consistent TC and TC Surge hazard set event using Snakemake and CLIMADA", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpybi507vm/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "spjuhel/Climada_TC_TCSurge_Generation", - "latest_release": null, - "linting": "KeyError in file /tmp/tmpybi507vm/workflow/Snakefile, line 27:\n\u0027start\u0027\n File \"/tmp/tmpybi507vm/workflow/Snakefile\", line 27, in \u003cmodule\u003e\n", - "mandatory_flags": { - "desc": null, - "flags": null - }, - "report": true, - "software_stack_deployment": { - "conda": true, - "singularity": false, - "singularity+conda": false - }, - "standardized": true, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1725981203.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmp_8rvuhe_/workflow/rules/phylogeny.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] \n[INFO] 1 file(s) would be changed \ud83d\ude2c\n[INFO] 4 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", - "full_name": "koisland/CenClustering", - "latest_release": null, - "linting": "Multiple includes of /tmp/tmp_8rvuhe_/workflow/rules/data.smk ignored\nLints for snakefile https://:ghs_9K7szIWhrwDYcwcKyxn0stCzzoXvKe0s9Fdn@raw.githubusercontent.com/koisland/asm-to-reference-alignment/minimal/workflow/Snakefile:\n * Absolute path \"/tmp\" in line 13:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n\nLints for snakefile /tmp/tmp_8rvuhe_/workflow/rules/phylogeny.smk:\n * Deprecated singularity directive used for container definition in line 25.:\n Use the container directive instead (it is agnostic of the underlying\n container runtime).\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule wget (line 1, /tmp/tmp_8rvuhe_/workflow/rules/utils.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule get_chm13_asm (line 6, /tmp/tmp_8rvuhe_/workflow/rules/data.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule get_chimp_asm (line 28, /tmp/tmp_8rvuhe_/workflow/rules/data.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1725980621.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "Snakemake pipeline for parsing simulation outputs across scaling factors", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpkpgy83jb/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "SchriderLab/simscale-snakemake", - "latest_release": null, - "linting": "TypeError in file /tmp/tmpkpgy83jb/workflow/Snakefile, line 11:\n\u0027NoneType\u0027 object is not subscriptable\n File \"/tmp/tmpkpgy83jb/workflow/Snakefile\", line 11, in \u003cmodule\u003e\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1725894756.0 - }, - { - "config_readme": "\u003cp\u003eDescribe how to configure the workflow (using config.yaml and maybe additional files).\nAll of them need to be present with example entries inside of the config folder.\u003c/p\u003e\n", - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpiobvfk4r/workflow/rules/toblow5.smk\": Formatted content is different from original\n[DEBUG] \n\u003cunknown\u003e:1: 
SyntaxWarning: invalid escape sequence \u0027\\d\u0027\n[DEBUG] In file \"/tmp/tmpiobvfk4r/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpiobvfk4r/workflow/rules/fetch_data.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpiobvfk4r/workflow/rules/common.smk\": Formatted content is different from original\n[INFO] 4 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "loganylchen/prep_public_nanoporeDRS_data", - "latest_release": null, - "linting": "/tmp/tmpiobvfk4r/workflow/Snakefile:9: SyntaxWarning: invalid escape sequence \u0027\\d\u0027\n sample=\"[\\da-zA-Z]+\"\nFileNotFoundError in file /tmp/tmpiobvfk4r/workflow/rules/common.smk, line 6:\n[Errno 2] No such file or directory: \u0027config/samples.tsv\u0027\n File \"/tmp/tmpiobvfk4r/workflow/rules/common.smk\", line 6, in \u003cmodule\u003e\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1026, in read_csv\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 620, in _read\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1620, in __init__\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1880, in _make_engine\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/common.py\", line 873, in get_handle\n", - "mandatory_flags": { - "desc": null, - "flags": null - }, - "report": true, - "software_stack_deployment": { - "conda": true, - "singularity": false, - "singularity+conda": false - }, - "standardized": true, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1725894100.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpy9boas6k/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "daiuc/splice-pub", - "latest_release": null, - "linting": "Lints for snakefile /tmp/tmpy9boas6k/Snakefile:\n * Absolute path \"/project/yangili1/cdai/SpliFi\" in line 41:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/project/yangili1/cdai/splice-pub/smk-plots/gtex-sqtl-enrichment-v4\" in line 42:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/project/yangili1/cdai/splice-pub/data/WGS_Feature_overlap_collapsed_VEP_short_4torus.MAF01.txt.gz\" in line 44:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/project/yangili1/cdai/SpliFi\" in line 58:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/project/yangili1/cdai/splice-pub/smk-plots/gtex-sqtl-enrichment-v4-C\" in line 58:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/project/yangili1/cdai/splice-pub/data/WGS_Feature_overlap_collapsed_VEP_short_4torus.MAF01.txt.gz\" in line 60:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Path composition with \u0027+\u0027 in line 15:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n\nLints for rule plotGTExsQTLEnrichment (line 33, /tmp/tmpy9boas6k/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule plotGTExsQTLEnrichment_test (line 76, /tmp/tmpy9boas6k/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule prepTorusInputData (line 63, /tmp/tmpy9boas6k/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param outprefix is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule RunTorusEnrich (line 85, /tmp/tmpy9boas6k/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1725892780.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[ERROR] In file \"/tmp/tmpl4vsaazr/Snakefile\": SyntaxError: L107: Unrecognised keyword \u0027savedir\u0027 in rule definition\n[INFO] In file \"/tmp/tmpl4vsaazr/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", - "full_name": "ML4GW/gwak", - "latest_release": null, - "linting": "Lints for snakefile /tmp/tmpl4vsaazr/Snakefile:\n * Absolute path \"/home/katya.govorkova/gwak/{version}/data/{dataclass}.npz\" in line 87:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n\nLints for rule find_valid_segments (line 31, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule run_omicron (line 40, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule fetch_site_data (line 52, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule generate_data (line 64, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable PERIOD from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule upload_data (line 81, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule validate_data (line 92, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable VERSION from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule train_quak (line 101, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule generate_timeslides_for_far (line 113, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule evaluate_signals (line 137, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule generate_timeslides_for_fm (line 150, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule train_final_metric (line 173, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule recreation_and_quak_plots (line 192, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule compute_far (line 208, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule merge_far_hist (line 232, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule quak_plotting_prediction_and_recreation (line 242, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule plot_results (line 257, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param evaluation_dir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule make_pipeline_plot (line 272, /tmp/tmpl4vsaazr/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 4, - "topics": [], - "updated_at": 1725890954.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpijdr3wdn/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "iedun/XDEM-Project", - "latest_release": null, - "linting": "RuleException in file /tmp/tmpijdr3wdn/Snakefile, line 32:\nOnly input files can be specified as functions\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1725890597.0 - }, { "config_readme": null, "data_format": 2, @@ -4791,210 +4995,6 @@ var data = "topics": [], "updated_at": 1725656443.0 }, - { - "config_readme": null, - "data_format": 2, - "description": "Barcode Recovery and Comparison from Database", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/rules/commons.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n\u003cunknown\u003e:1: SyntaxWarning: invalid escape sequence \u0027\\s\u0027\n[DEBUG] In file \"/tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/rules/reports.smk\": Formatted content is different from original\n[DEBUG] \n\u003cunknown\u003e:1: SyntaxWarning: invalid escape sequence \u0027\\q\u0027\n[DEBUG] In file \"/tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/rules/pairwise_alignement.smk\": Formatted content is different from original\n[DEBUG] \n\u003cunknown\u003e:1: SyntaxWarning: invalid escape sequence \u0027\\s\u0027\n[DEBUG] In file \"/tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/rules/extract_barcodes.smk\": Formatted content is different from original\n[INFO] 5 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "CVUA-RRW/BaRCoD", - "latest_release": "1.1.1", - "linting": "WorkflowError in file /tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/Snakefile, line 12:\nWorkflow defines configfile /tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/workflow/../.tests/config/config.yaml but it is not present or accessible (full checked path: /tmp/tmprlaxvokq/CVUA-RRW-BaRCoD-e366aef/.tests/config/config.yaml).\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 0, - "topics": [], - "updated_at": 1726142474.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "Find 
single-stranded DNA breaks with an error-prone DNA polymerase", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp_sbd6gc0/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "NBISweden/Sloppymerase", - "latest_release": null, - "linting": "Lints for rule count_illumina_reads (line 24, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule map_illumina (line 33, /tmp/tmp_sbd6gc0/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule count_nanopore_pacbio_reads (line 49, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule map_nanopore (line 57, /tmp/tmp_sbd6gc0/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule map_pacbio (line 71, /tmp/tmp_sbd6gc0/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule detect_breaks (line 84, /tmp/tmp_sbd6gc0/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule sort_bed (line 106, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule subtract_illumina_controls (line 116, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule simulate_nickase (line 128, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule random_sites (line 141, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule intersect_nickase_sites (line 153, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule intersect_random_sites (line 162, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule index_bed (line 171, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule index_bam (line 177, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule bgzip_bed (line 182, /tmp/tmp_sbd6gc0/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 7, - "topics": [], - "updated_at": 1726141738.0 - }, - { - "config_readme": "\u003cp\u003eDescribe how to configure the workflow (using config.yaml and maybe additional files).\nAll of them need to be present with example entries inside of the config folder.\u003c/p\u003e\n", - "data_format": 2, - "description": "create long read (Nanopore with NanoSim) and short read (Illumina with Circle-Map) testing data for ecDNA analysis workflows", - "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmpiw53r2g_/workflow/Snakefile\": Keyword \"params\" at line 56 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[ERROR] In file \"/tmp/tmpiw53r2g_/workflow/Snakefile\": IndexError: pop from empty list\n[INFO] In file \"/tmp/tmpiw53r2g_/workflow/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", - "full_name": "dlaehnemann/create-ecdna-testing-data", - "latest_release": null, - "linting": "Workflow defines that rule get_chromosomes_ref is eligible for caching between workflows (use the --cache argument to enable this).\nLints for snakefile /tmp/tmpiw53r2g_/workflow/Snakefile:\n * Absolute path \"/human_NA12878_DNA_FAB49712_guppy/training\" in line 12:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/human_NA12878_DNA_FAB49712_guppy/training\" in line 16:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Mixed rules and functions in same snakefile.:\n Small one-liner functions used only once should be defined as lambda\n expressions. Other functions should be collected in a common module, e.g.\n \u0027rules/common.smk\u0027. This makes the workflow steps more readable.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/modularization.html#includes\n\nLints for rule config_segment_to_bed (line 65, /tmp/tmpiw53r2g_/workflow/Snakefile):\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. 
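(Editorial aside, not part of the catalog data: the two lints repeated throughout the entries above, "No log directive defined" and "Specify a conda environment or container for each rule", are both addressed with per-rule directives. A minimal sketch, with rule, file, and environment names invented for illustration rather than taken from any catalogued workflow:)

    rule count_reads:
        input:
            "results/{sample}.bam",
        output:
            "results/{sample}.read_count.txt",
        # a per-rule log file instead of mixing output into the terminal
        log:
            "logs/count_reads/{sample}.log",
        # pins the software for this step so it can run on a machine without prerequisites
        conda:
            "envs/samtools.yaml",
        shell:
            "samtools view -c {input} > {output} 2> {log}"

(Assuming envs/samtools.yaml lists samtools, the environment is built per rule when Snakemake is run with conda-based software deployment enabled.)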
Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule create_samples_sheet (line 277, /tmp/tmpiw53r2g_/workflow/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule create_units_sheet (line 305, /tmp/tmpiw53r2g_/workflow/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\n", - "mandatory_flags": { - "desc": null, - "flags": null - }, - "report": true, - "software_stack_deployment": { - "conda": true, - "singularity": false, - "singularity+conda": false - }, - "standardized": true, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726140775.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "Just annotate the MAGs as in mg_assembly", - "formatting": null, - "full_name": "3d-omics/mg_annotate", - "latest_release": null, - "linting": "Lints for snakefile /tmp/tmpi0370ikk/workflow/rules/dram.smk:\n * Mixed rules and functions in same snakefile.:\n Small one-liner functions used only once should be defined as lambda\n expressions. Other functions should be collected in a common module, e.g.\n \u0027rules/common.smk\u0027. 
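(Editorial aside on the "Migrate long run directives into scripts or notebooks" lint quoted above: the usual fix is to replace the run: block with a script: directive and move the Python code into the workflow's scripts directory, where it can still read the rule's files via the snakemake object. Names below are hypothetical:)

    rule segments_to_bed:
        input:
            "results/segments.tsv",
        output:
            "results/segments.bed",
        log:
            "logs/segments_to_bed.log",
        # the former run: body lives in this script, where the rule's files are
        # available as snakemake.input, snakemake.output and snakemake.params
        script:
            "scripts/segments_to_bed.py"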
This makes the workflow steps more readable.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/modularization.html#includes\n\nLints for rule checkm2__quality_report__ (line 27, /tmp/tmpi0370ikk/workflow/rules/checkm2.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726134655.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "La idea es crear una p\u00e1gina web con especies de fauna y flora de Gran Canaria que vaya encontrando, vamos a ver como hacemos esto", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp7el7akpu/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "JuanCarlosBio/BiodiversidadGJC", - "latest_release": null, - "linting": "Lints for rule download_images (line 26, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule download_canary_islands_shp (line 41, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule download_extra_layers (line 54, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule download_biota_data (line 71, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule process_canary_islands_shp (line 83, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule process_jardin_botanico_kml (line 99, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule process_biota_data (line 112, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule figures_and_stats (line 146, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule webpage_html (line 165, /tmp/tmp7el7akpu/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726127542.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "A Snakemake pipeline to analyze RNA-seq expression data", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/diffexp.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/quantification.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmp31et65mg/workflow/Snakefile\": Keyword \"input\" at line 44 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/plots.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/splicing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/trim.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp31et65mg/workflow/rules/align.smk\": Formatted content is different from original\n[INFO] 7 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "kristinassong/RNAseq", - 
"latest_release": null, - "linting": "Lints for rule star_index (line 1, /tmp/tmp31et65mg/workflow/rules/align.smk):\n * Param idx is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule star_align (line 27, /tmp/tmp31et65mg/workflow/rules/align.smk):\n * Param out_prefix is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param idx is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule genomecov (line 97, /tmp/tmp31et65mg/workflow/rules/align.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule kallisto_quant (line 38, /tmp/tmp31et65mg/workflow/rules/quantification.smk):\n * Param outdir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule tx2gene (line 67, /tmp/tmp31et65mg/workflow/rules/quantification.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule merge_kallisto_quant (line 80, /tmp/tmp31et65mg/workflow/rules/quantification.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule deseq2 (line 3, /tmp/tmp31et65mg/workflow/rules/diffexp.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Param kallisto_dir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param out_dir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule volcano_plot (line 22, /tmp/tmp31et65mg/workflow/rules/diffexp.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule pca (line 1, /tmp/tmp31et65mg/workflow/rules/plots.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule multiqc (line 17, /tmp/tmp31et65mg/workflow/rules/plots.smk):\n * Param outdir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. 
Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule rmats (line 1, /tmp/tmp31et65mg/workflow/rules/splicing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule filter_rmats (line 23, /tmp/tmp31et65mg/workflow/rules/splicing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Param dir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule rmats_paired_env (line 46, /tmp/tmp31et65mg/workflow/rules/splicing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule rmats_paired (line 62, /tmp/tmp31et65mg/workflow/rules/splicing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule filter_rmats_paired (line 85, /tmp/tmp31et65mg/workflow/rules/splicing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Param dir is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. 
Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [ - "rna-seq-pipeline", - "snakemake-workflow" - ], - "updated_at": 1726112847.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "Workflow for processing UAS imagery into data on bird location and species for near real-time monitoring in the Everglades", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpopwg9ani/weecology-everwatch-workflow-2e6fceb/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "weecology/everwatch-workflow", - "latest_release": "v0.2.0", - "linting": "WorkflowError in file /tmp/tmpopwg9ani/weecology-everwatch-workflow-2e6fceb/Snakefile, line 5:\nWorkflow defines configfile /blue/ewhite/everglades/everwatch-workflow/snakemake_config.yml but it is not present or accessible (full checked path: /blue/ewhite/everglades/everwatch-workflow/snakemake_config.yml).\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 4, - "topics": [], - "updated_at": 1726093707.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[ERROR] In file \"/tmp/tmpb_4sx3jl/workflow/Snakefile\": InvalidPython: Black error:\n```\nCannot parse: 75:0: EOF in multi-line statement\n```\n\n[INFO] In file \"/tmp/tmpb_4sx3jl/workflow/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", - "full_name": "SoftLivingMatter/5eU-seq-pipelines", - "latest_release": null, - "linting": "Creating specified working directory /your/directory/here.\nPermissionError in file /tmp/tmpb_4sx3jl/workflow/Snakefile, line 7:\n[Errno 13] Permission denied: \u0027/your\u0027\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1315, in mkdir\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1315, in mkdir\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/pathlib.py\", line 1311, in mkdir\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 0, - "topics": [], - "updated_at": 1726177806.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "Workflow to identify the centromere dip region (CDR) from methyl BAMs", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpumxp1g0a/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[INFO] 1 file(s) would be changed \ud83d\ude2c\n[INFO] 1 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", - "full_name": "koisland/CDR-Finder", - "latest_release": null, - "linting": null, - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 
1726071971.0 - }, - { - "config_readme": "\u003cp\u003eDescribe how to configure the workflow (using config.yaml and maybe additional files).\nAll of them need to be present with example entries inside of the config folder.\u003c/p\u003e\n", - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpbyphbz3g/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "vkpat/short-read-mapping-and-QC-snakemake-pipeline", - "latest_release": null, - "linting": "WorkflowError in file /tmp/tmpbyphbz3g/workflow/Snakefile, line 1:\nWorkflow defines configfile config.yaml but it is not present or accessible (full checked path: /tmp/tmpbyphbz3g/config.yaml).\n", - "mandatory_flags": { - "desc": null, - "flags": null - }, - "report": true, - "software_stack_deployment": { - "conda": true, - "singularity": false, - "singularity+conda": false - }, - "standardized": true, - "stargazers_count": 0, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726070051.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmptmii62qa/rules/renv.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmptmii62qa/Snakefile\": Formatted content is different from original\n[INFO] 2 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "deer-marketing-lab/dsms-lecture-ugc-ratings", - "latest_release": null, - "linting": "Lints for rule build_html (line 16, /tmp/tmptmii62qa/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable run_r from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable log_all from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule build_pdf (line 29, /tmp/tmptmii62qa/Snakefile):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable run_r from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. 
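(Editorial aside on two lints that recur above: a params value that is really a prefix of an input or output file should be derived with an input/output function rather than hardcoded, and module-level variables used by a shell command should be passed through params instead of being referenced directly. A sketch with invented names, not code from any catalogued workflow:)

    import os

    bedtools_bin = "bedtools"  # module-level setting: passed via params below, not used directly in shell

    rule genome_coverage:
        input:
            "results/{sample}.sorted.bam",
        output:
            "results/coverage/{sample}.bedgraph",
        # out_dir is inferred from the output file instead of being hardcoded;
        # the module-level bedtools_bin is handed in via params for provenance tracking
        params:
            out_dir=lambda w, output: os.path.dirname(output[0]),
            bedtools=bedtools_bin,
        log:
            "logs/coverage/{sample}.log",
        conda:
            "envs/bedtools.yaml",
        shell:
            "mkdir -p {params.out_dir} && "
            "{params.bedtools} genomecov -bg -ibam {input} > {output} 2> {log}"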
Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable log_all from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule clean (line 48, /tmp/tmptmii62qa/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule renv_install (line 10, /tmp/tmptmii62qa/rules/renv.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable runR from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule renv_consent (line 15, /tmp/tmptmii62qa/rules/renv.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule renv_init (line 20, /tmp/tmptmii62qa/rules/renv.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule renv_snap (line 25, /tmp/tmptmii62qa/rules/renv.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule renv_restore (line 30, /tmp/tmptmii62qa/rules/renv.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 0, - "subscribers_count": 0, - "topics": [], - "updated_at": 1726069627.0 - }, { "config_readme": null, "data_format": 2, @@ -36559,6 +36559,74 @@ var data = "topics": [], "updated_at": 1614368088.0 }, + { + "config_readme": null, + "data_format": 2, + "description": "An experimental nanopore only 16s Amplicon analysis ", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/guppy.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/nanoclust.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/emu.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpqwq3fr8_/workflow/Snakefile\": Keyword \"input\" at line 8 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/Snakefile\": Formatted content is different from 
original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/picrust2.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/qc.smk\": Formatted content is different from original\n[INFO] 7 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "matinnuhamunada/ONT_16S_Analysis", + "latest_release": null, + "linting": "Lints for rule emu_copy_table (line 37, /tmp/tmpqwq3fr8_/workflow/rules/emu.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule get_nanoclust_db (line 1, /tmp/tmpqwq3fr8_/workflow/rules/nanoclust.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule get_nanoclust (line 15, /tmp/tmpqwq3fr8_/workflow/rules/nanoclust.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 1, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726141971.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/index.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/bam_cleaning.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpc7_2x2p7/workflow/rules/annotate_variants.smk\": Keyword \"input\" at line 16 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/annotate_variants.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/quantification.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file 
\"/tmp/tmpc7_2x2p7/workflow/rules/HLA_typing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/base_recalibration.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/pMHC.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/alignment.smk\": Formatted content is different from original\n[INFO] 10 file(s) would be changed \ud83d\ude2c\n[INFO] 3 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", + "full_name": "ctglab/ENEO", + "latest_release": null, + "linting": null, + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 1, + "subscribers_count": 2, + "topics": [], + "updated_at": 1726137283.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Opinionated viral metagenomics workflow", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp_s8amaf5/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "dhoconno/nvd", + "latest_release": null, + "linting": "KeyError in file /tmp/tmp_s8amaf5/workflow/Snakefile, line 31:\n\u0027run_id\u0027\n File \"/tmp/tmp_s8amaf5/workflow/Snakefile\", line 31, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 1, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726077730.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/process_features.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/classify_variants.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/bam_to_fastq.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/variant_calling.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/filtering.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/process_bam.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/salmon.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/setup.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/annotations.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/query_vcfs.smk\": Formatted content is different from original\n[INFO] 11 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "nch-igm/VarRNA", + "latest_release": null, + "linting": "WorkflowError in file 
/tmp/tmpmd1x65hm/workflow/rules/setup.smk, line 8:\nWorkflow defines configfile ../config/config.yaml but it is not present or accessible (full checked path: /tmp/config/config.yaml).\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 1, + "subscribers_count": 2, + "topics": [], + "updated_at": 1725973311.0 + }, { "config_readme": null, "data_format": 2, @@ -37692,74 +37760,6 @@ var data = "topics": [], "updated_at": 1725377409.0 }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/process_features.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/classify_variants.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/bam_to_fastq.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/variant_calling.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/filtering.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/process_bam.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/salmon.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/setup.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/annotations.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpmd1x65hm/workflow/rules/query_vcfs.smk\": Formatted content is different from original\n[INFO] 11 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "nch-igm/VarRNA", - "latest_release": null, - "linting": "WorkflowError in file /tmp/tmpmd1x65hm/workflow/rules/setup.smk, line 8:\nWorkflow defines configfile ../config/config.yaml but it is not present or accessible (full checked path: /tmp/config/config.yaml).\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 1, - "subscribers_count": 2, - "topics": [], - "updated_at": 1725973311.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "An experimental nanopore only 16s Amplicon analysis ", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/guppy.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/nanoclust.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/emu.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpqwq3fr8_/workflow/Snakefile\": Keyword \"input\" at line 8 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file 
\"/tmp/tmpqwq3fr8_/workflow/rules/picrust2.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqwq3fr8_/workflow/rules/qc.smk\": Formatted content is different from original\n[INFO] 7 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "matinnuhamunada/ONT_16S_Analysis", - "latest_release": null, - "linting": "Lints for rule emu_copy_table (line 37, /tmp/tmpqwq3fr8_/workflow/rules/emu.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule get_nanoclust_db (line 1, /tmp/tmpqwq3fr8_/workflow/rules/nanoclust.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule get_nanoclust (line 15, /tmp/tmpqwq3fr8_/workflow/rules/nanoclust.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 1, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726141971.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/index.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/bam_cleaning.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpc7_2x2p7/workflow/rules/annotate_variants.smk\": Keyword \"input\" at line 16 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/annotate_variants.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/quantification.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/HLA_typing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/base_recalibration.smk\": Formatted 
content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/pMHC.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpc7_2x2p7/workflow/rules/alignment.smk\": Formatted content is different from original\n[INFO] 10 file(s) would be changed \ud83d\ude2c\n[INFO] 3 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", - "full_name": "ctglab/ENEO", - "latest_release": null, - "linting": null, - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 1, - "subscribers_count": 2, - "topics": [], - "updated_at": 1726137283.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "Opinionated viral metagenomics workflow", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp_s8amaf5/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "dhoconno/nvd", - "latest_release": null, - "linting": "KeyError in file /tmp/tmp_s8amaf5/workflow/Snakefile, line 31:\n\u0027run_id\u0027\n File \"/tmp/tmp_s8amaf5/workflow/Snakefile\", line 31, in \u003cmodule\u003e\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 1, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726077730.0 - }, { "config_readme": null, "data_format": 2, @@ -43374,6 +43374,57 @@ var data = "topics": [], "updated_at": 1551028733.0 }, + { + "config_readme": null, + "data_format": 2, + "description": "Collection of rules performing QC and generating reports.", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/picard.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/peddy.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/gatk.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/mosdepth.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/samtools.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/rseqc.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/verifybamid2.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/multiqc.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/fastqc.smk\": Formatted content is different from original\n[INFO] 10 file(s) would be changed \ud83d\ude2c\n[INFO] 1 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt 
version: 0.10.2", + "full_name": "hydra-genetics/qc", + "latest_release": "v0.5.0", + "linting": "ModuleNotFoundError in file /tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/common.smk, line 8:\nNo module named \u0027hydra_genetics\u0027\n File \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/common.smk\", line 8, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 2, + "subscribers_count": 4, + "topics": [], + "updated_at": 1726124209.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Code repository accompanying the manuscript, Symbiont loss and gain, rather than co-diversification shapes honeybee gut microbiota diversity and function", + "formatting": "[DEBUG] \n[ERROR] In file \"/tmp/tmp9gd0lmeu/Aiswarya-prasad-honeybee-cross-species-metagenomics-3d7912b/workflow/Snakefile\": InvalidPython: Black error:\n```\nCannot parse: 37:0: run = \"20[0-9]{6,6}\"\n```\n\n[INFO] In file \"/tmp/tmp9gd0lmeu/Aiswarya-prasad-honeybee-cross-species-metagenomics-3d7912b/workflow/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", + "full_name": "Aiswarya-prasad/honeybee-cross-species-metagenomics", + "latest_release": "v1.0.2", + "linting": "FileNotFoundError in file /tmp/tmp9gd0lmeu/Aiswarya-prasad-honeybee-cross-species-metagenomics-3d7912b/workflow/Snakefile, line 43:\n[Errno 2] No such file or directory: \u0027config/raw_file_paths.yaml\u0027\n File \"/tmp/tmp9gd0lmeu/Aiswarya-prasad-honeybee-cross-species-metagenomics-3d7912b/workflow/Snakefile\", line 43, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 2, + "subscribers_count": 2, + "topics": [], + "updated_at": 1725930556.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmprgr01pec/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "vshanka23/dgrp_gwas_final", + "latest_release": null, + "linting": "Lints for snakefile /tmp/tmprgr01pec/Snakefile:\n * Absolute path \"/inputs/pheno/\"+\" in line 3:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 9:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 10:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 11:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 12:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 13:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 14:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 18:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 19:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/pheno/\"+\" in line 20:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 22:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 23:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 24:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 25:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 26:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 28:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 48:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+config[\" in line 49:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 51:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+config[\" in line 68:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 69:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 71:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 76:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 96:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 98:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 100:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 121:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 122:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 123:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 125:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 131:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 154:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 156:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 157:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 158:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 159:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 160:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 162:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 163:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 174:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 178:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+config[\" in line 180:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 180:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 181:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Path composition with \u0027+\u0027 in line 1:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 49:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 51:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 68:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 71:\n This becomes quickly unreadable. 
Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 76:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 96:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 98:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 100:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 121:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 122:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 131:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n\nLints for rule input_filter (line 16, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule convert_2_plink (line 66, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param BFILE is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule make_grm (line 94, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param BFILE is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param DEST is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule association (line 119, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param BFILE is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param DEST is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule post_processing (line 152, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule annotate (line 172, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param WORKDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 2, + "subscribers_count": 1, + "topics": [], + "updated_at": 1725907329.0 + }, { "config_readme": "\u003cp\u003eAdd datasets to datasets.tsv, and update config.yml to customize options.\u003c/p\u003e\n", "data_format": 2, @@ -44179,40 +44230,6 @@ var data = "topics": [], "updated_at": 1725446042.0 }, - { - "config_readme": null, - "data_format": 2, - "description": "Code repository accompanying the manuscript, Symbiont loss and gain, rather than co-diversification shapes honeybee gut microbiota diversity and function", - "formatting": "[DEBUG] \n[ERROR] In file \"/tmp/tmp9gd0lmeu/Aiswarya-prasad-honeybee-cross-species-metagenomics-3d7912b/workflow/Snakefile\": InvalidPython: Black error:\n```\nCannot parse: 37:0: run = \"20[0-9]{6,6}\"\n```\n\n[INFO] In file \"/tmp/tmp9gd0lmeu/Aiswarya-prasad-honeybee-cross-species-metagenomics-3d7912b/workflow/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", - "full_name": "Aiswarya-prasad/honeybee-cross-species-metagenomics", - "latest_release": "v1.0.2", - "linting": "FileNotFoundError in file /tmp/tmp9gd0lmeu/Aiswarya-prasad-honeybee-cross-species-metagenomics-3d7912b/workflow/Snakefile, line 43:\n[Errno 2] No such file or directory: \u0027config/raw_file_paths.yaml\u0027\n File \"/tmp/tmp9gd0lmeu/Aiswarya-prasad-honeybee-cross-species-metagenomics-3d7912b/workflow/Snakefile\", line 43, in \u003cmodule\u003e\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 2, - "subscribers_count": 2, - "topics": [], - "updated_at": 1725930556.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmprgr01pec/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "vshanka23/dgrp_gwas_final", - "latest_release": null, - "linting": "Lints for snakefile /tmp/tmprgr01pec/Snakefile:\n * Absolute path \"/inputs/pheno/\"+\" in line 3:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other 
machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 9:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 10:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 11:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 12:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 13:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 14:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 18:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 19:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/pheno/\"+\" in line 20:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 22:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 23:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 24:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 25:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 26:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 28:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 48:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+config[\" in line 49:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 51:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+config[\" in line 68:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 69:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 71:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 76:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 96:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 98:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 100:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 121:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 122:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 123:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 125:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+config[\" in line 131:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 154:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 156:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 157:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 158:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 159:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 160:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. 
Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 162:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 163:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 174:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 178:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+config[\" in line 180:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/\"+\" in line 180:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/{PHENO_UNIQ}/\"+\" in line 181:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Path composition with \u0027+\u0027 in line 1:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 49:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. 
If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 51:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 68:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 71:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 76:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 96:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 98:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 100:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 121:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 122:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 131:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n\nLints for rule input_filter (line 16, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule convert_2_plink (line 66, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param BFILE is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule make_grm (line 94, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param BFILE is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. 
Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param DEST is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule association (line 119, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param BFILE is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param DEST is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule post_processing (line 152, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule annotate (line 172, /tmp/tmprgr01pec/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Param WORKDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 2, - "subscribers_count": 1, - "topics": [], - "updated_at": 1725907329.0 - }, { "config_readme": null, "data_format": 2, @@ -44240,23 +44257,6 @@ var data = ], "updated_at": 1725813958.0 }, - { - "config_readme": null, - "data_format": 2, - "description": "Collection of rules performing QC and generating reports.", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/picard.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/peddy.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/gatk.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/mosdepth.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/samtools.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/rseqc.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/common.smk\": Formatted content is different 
from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/verifybamid2.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/multiqc.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/fastqc.smk\": Formatted content is different from original\n[INFO] 10 file(s) would be changed \ud83d\ude2c\n[INFO] 1 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", - "full_name": "hydra-genetics/qc", - "latest_release": "v0.5.0", - "linting": "ModuleNotFoundError in file /tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/common.smk, line 8:\nNo module named \u0027hydra_genetics\u0027\n File \"/tmp/tmp08jix54y/hydra-genetics-qc-f44026e/workflow/rules/common.smk\", line 8, in \u003cmodule\u003e\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 2, - "subscribers_count": 4, - "topics": [], - "updated_at": 1726124209.0 - }, { "config_readme": null, "data_format": 2, @@ -46910,6 +46910,136 @@ var data = ], "updated_at": 1617804426.0 }, + { + "config_readme": null, + "data_format": 2, + "description": "A model of the Illinois electricity system built with PyPSA.", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpbse_9wfv/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "ucsusa/pypsa-illinois", + "latest_release": null, + "linting": "ModuleNotFoundError in file /tmp/tmpbse_9wfv/Snakefile, line 5:\nNo module named \u0027dotenv\u0027\n File \"/tmp/tmpbse_9wfv/Snakefile\", line 5, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 3, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726083747.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Variant calling workflow for mixed ploidy samples implemented in Snakemake and using GATK4 ", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/0_indexreference.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpxs3nt0j_/workflow/rules/4_filter_bcftools.smk\": Keyword \"output\" at line 98 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/4_filter_bcftools.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/4_filter_gatk.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpxs3nt0j_/workflow/rules/1_mapreads.smk\": Keyword \"input\" at line 417 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/1_mapreads.smk\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmpxs3nt0j_/workflow/rules/3_genotypeGVCF.smk\": SyntaxError: L130: Unrecognised keyword \u0027vcf_stats_GT_DP_multiallelic\u0027 in rule definition\n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/3_genotypeGVCF.smk\": \n[DEBUG] In file 
\"/tmp/tmpxs3nt0j_/workflow/rules/2_callvars.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) raised parsing errors \ud83e\udd15\n[INFO] 6 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "jgerchen/polyploid_variant_calling", + "latest_release": null, + "linting": "KeyError in file /tmp/tmpxs3nt0j_/workflow/rules/0_indexreference.smk, line 6:\n\u0027input_fasta\u0027\n File \"/tmp/tmpxs3nt0j_/workflow/rules/0_indexreference.smk\", line 6, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 3, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726043720.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "This repository contains the source code for the datasets used in the implementation of the dengue lineage system in NextClade. The organizational workflow was developed based on the workflow created for the m-pox virus, available at: https://github.com/nextstrain/mpox/tree/master/nextclade.\"", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpb5lq7c6v/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "V-GEN-Lab/dengue-lineages-workflow", + "latest_release": null, + "linting": "Lints for snakefile /tmp/tmpb5lq7c6v/Snakefile:\n * Mixed rules and functions in same snakefile.:\n Small one-liner functions used only once should be defined as lambda\n expressions. Other functions should be collected in a common module, e.g.\n \u0027rules/common.smk\u0027. This makes the workflow steps more readable.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/modularization.html#includes\n\nLints for rule augur_ancestral (line 27, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule augur_translate (line 47, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule augur_clades (line 64, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule colors (line 83, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule augur_export (line 99, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule assemble_dataset (line 123, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule clean (line 151, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 3, + "subscribers_count": 0, + "topics": [], + "updated_at": 1725978690.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": null, + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/align.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/deseq2.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/plots.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/index.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/preprocess.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/quantification.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/qc.smk\": Formatted content is different from original\n[INFO] 9 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "cnio-bu/cluster_rnaseq", + "latest_release": null, + "linting": "/tmp/tmp3ge02rmx/Snakefile:89: SyntaxWarning: invalid escape sequence \u0027\\+\u0027\n covariates = list(set(covariates))\nWorkflowError in file /tmp/tmp3ge02rmx/Snakefile, line 13:\nWorkflow defines configfile config.yaml but it is not present or accessible (full checked path: /tmp/tmp3ge02rmx/config.yaml).\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 3, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726035940.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": 
"A comprehensive quality-control and quantification RNA-seq pipeline", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/build.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/nidap.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/paired-end.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/single-end.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/group-info.smk\": Formatted content is different from original\n[INFO] 7 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "CCBR/RENEE", + "latest_release": "v2.6.0", + "linting": "WorkflowError in file /tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/Snakefile, line 20:\nWorkflow defines configfile config.json but it is not present or accessible (full checked path: /tmp/tmplewoie5z/CCBR-RENEE-b7f3297/config.json).\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 3, + "subscribers_count": 3, + "topics": [], + "updated_at": 1725922872.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Workflow for preprocessing cfDNA samples.", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/bam_to_fastq.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/reference.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/extract_signals.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n\u003cunknown\u003e:1: SyntaxWarning: invalid escape sequence \u0027\\.\u0027\n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/GC_bias.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/ichorCNA.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/trimming.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/QualityControl.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/filtering.smk\": Formatted content is different from original\n[INFO] 9 file(s) would be changed \ud83d\ude2c\n[INFO] 2 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", + "full_name": "kircherlab/cfDNA-UniFlow", + "latest_release": null, + "linting": "WorkflowError in file /tmp/tmpu82uazxk/workflow/rules/common.smk, line 9:\nWorkflow defines configfile config/config.yaml but it is not present or accessible (full checked path: /tmp/tmpu82uazxk/config/config.yaml).\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + 
"stargazers_count": 3, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726116418.0 + }, + { + "config_readme": "", + "data_format": 2, + "description": "Snakemake workflow for DamID-Seq analysis", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/trimming.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/bedgraph_processing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/bed.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/deeptools.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/fastqc.smk\": Keyword \"shell\" at line 36 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/fastqc.smk\": Keyword \"shell\" at line 80 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/fastqc.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/plotting.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/motifs.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/resources.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/peak_calling.smk\": Keyword \"shell\" at line 439 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/peak_calling.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/damid.smk\": Formatted content is different from original\n[INFO] 11 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "niekwit/damid-seq", + "latest_release": "v0.5.0", + "linting": "Workflow version: v0.5.0\nWrapper version: v3.10.2\nValueError in file /tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/scripts/general_functions.smk, line 478:\nNumber of overlapping peaks to keep consensus peaks (config \u003e consensus_peak \u003e keep) is greater than number of subdirectories in reads/...\n File \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/Snakefile\", line 26, in \u003cmodule\u003e\n File \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/scripts/general_functions.smk\", line 478, in check_consensus_peak_settings\n", + "mandatory_flags": { + "desc": null, + "flags": null + }, + "report": true, + "software_stack_deployment": { + "conda": true, + "singularity": false, + 
"singularity+conda": false + }, + "standardized": true, + "stargazers_count": 3, + "subscribers_count": 1, + "topics": [ + "bioinformatics-pipeline", + "damid", + "snakemake-workflow" + ], + "updated_at": 1725892884.0 + }, { "config_readme": null, "data_format": 2, @@ -47284,136 +47414,6 @@ var data = "topics": [], "updated_at": 1725479591.0 }, - { - "config_readme": null, - "data_format": 2, - "description": "Variant calling workflow for mixed ploidy samples implemented in Snakemake and using GATK4 ", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/0_indexreference.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpxs3nt0j_/workflow/rules/4_filter_bcftools.smk\": Keyword \"output\" at line 98 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/4_filter_bcftools.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/4_filter_gatk.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpxs3nt0j_/workflow/rules/1_mapreads.smk\": Keyword \"input\" at line 417 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/1_mapreads.smk\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmpxs3nt0j_/workflow/rules/3_genotypeGVCF.smk\": SyntaxError: L130: Unrecognised keyword \u0027vcf_stats_GT_DP_multiallelic\u0027 in rule definition\n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/3_genotypeGVCF.smk\": \n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/rules/2_callvars.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpxs3nt0j_/workflow/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) raised parsing errors \ud83e\udd15\n[INFO] 6 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "jgerchen/polyploid_variant_calling", - "latest_release": null, - "linting": "KeyError in file /tmp/tmpxs3nt0j_/workflow/rules/0_indexreference.smk, line 6:\n\u0027input_fasta\u0027\n File \"/tmp/tmpxs3nt0j_/workflow/rules/0_indexreference.smk\", line 6, in \u003cmodule\u003e\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 3, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726043720.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "This repository contains the source code for the datasets used in the implementation of the dengue lineage system in NextClade. The organizational workflow was developed based on the workflow created for the m-pox virus, available at: https://github.com/nextstrain/mpox/tree/master/nextclade.\"", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpb5lq7c6v/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "V-GEN-Lab/dengue-lineages-workflow", - "latest_release": null, - "linting": "Lints for snakefile /tmp/tmpb5lq7c6v/Snakefile:\n * Mixed rules and functions in same snakefile.:\n Small one-liner functions used only once should be defined as lambda\n expressions. 
Other functions should be collected in a common module, e.g.\n \u0027rules/common.smk\u0027. This makes the workflow steps more readable.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/modularization.html#includes\n\nLints for rule augur_ancestral (line 27, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule augur_translate (line 47, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule augur_clades (line 64, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule colors (line 83, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule augur_export (line 99, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule assemble_dataset (line 123, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule clean (line 151, /tmp/tmpb5lq7c6v/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 3, - "subscribers_count": 0, - "topics": [], - "updated_at": 1725978690.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": null, - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/align.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/deseq2.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/plots.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/index.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/preprocess.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/quantification.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp3ge02rmx/rules/qc.smk\": Formatted content is different from original\n[INFO] 9 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "cnio-bu/cluster_rnaseq", - "latest_release": null, - "linting": "/tmp/tmp3ge02rmx/Snakefile:89: SyntaxWarning: invalid escape sequence \u0027\\+\u0027\n covariates = list(set(covariates))\nWorkflowError in file /tmp/tmp3ge02rmx/Snakefile, line 13:\nWorkflow defines configfile config.yaml but it is not present or accessible (full checked path: /tmp/tmp3ge02rmx/config.yaml).\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 3, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726035940.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "A comprehensive quality-control and quantification RNA-seq pipeline", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/build.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/nidap.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/paired-end.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file 
\"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/single-end.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/rules/group-info.smk\": Formatted content is different from original\n[INFO] 7 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "CCBR/RENEE", - "latest_release": "v2.6.0", - "linting": "WorkflowError in file /tmp/tmplewoie5z/CCBR-RENEE-b7f3297/workflow/Snakefile, line 20:\nWorkflow defines configfile config.json but it is not present or accessible (full checked path: /tmp/tmplewoie5z/CCBR-RENEE-b7f3297/config.json).\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 3, - "subscribers_count": 3, - "topics": [], - "updated_at": 1725922872.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "Workflow for preprocessing cfDNA samples.", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/bam_to_fastq.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/reference.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/extract_signals.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n\u003cunknown\u003e:1: SyntaxWarning: invalid escape sequence \u0027\\.\u0027\n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/GC_bias.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/ichorCNA.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/trimming.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/QualityControl.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmpu82uazxk/workflow/rules/filtering.smk\": Formatted content is different from original\n[INFO] 9 file(s) would be changed \ud83d\ude2c\n[INFO] 2 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", - "full_name": "kircherlab/cfDNA-UniFlow", - "latest_release": null, - "linting": "WorkflowError in file /tmp/tmpu82uazxk/workflow/rules/common.smk, line 9:\nWorkflow defines configfile config/config.yaml but it is not present or accessible (full checked path: /tmp/tmpu82uazxk/config/config.yaml).\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 3, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726116418.0 - }, - { - "config_readme": "", - "data_format": 2, - "description": "Snakemake workflow for DamID-Seq analysis", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/trimming.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/bedgraph_processing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/bed.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/deeptools.smk\": Formatted content is different from 
original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/fastqc.smk\": Keyword \"shell\" at line 36 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/fastqc.smk\": Keyword \"shell\" at line 80 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/fastqc.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/plotting.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/motifs.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/resources.smk\": Formatted content is different from original\n[DEBUG] \n[WARNING] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/peak_calling.smk\": Keyword \"shell\" at line 439 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/peak_calling.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/rules/damid.smk\": Formatted content is different from original\n[INFO] 11 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "niekwit/damid-seq", - "latest_release": "v0.5.0", - "linting": "Workflow version: v0.5.0\nWrapper version: v3.10.2\nValueError in file /tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/scripts/general_functions.smk, line 478:\nNumber of overlapping peaks to keep consensus peaks (config \u003e consensus_peak \u003e keep) is greater than number of subdirectories in reads/...\n File \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/Snakefile\", line 26, in \u003cmodule\u003e\n File \"/tmp/tmpanmb7tc4/niekwit-damid-seq-680e672/workflow/scripts/general_functions.smk\", line 478, in check_consensus_peak_settings\n", - "mandatory_flags": { - "desc": null, - "flags": null - }, - "report": true, - "software_stack_deployment": { - "conda": true, - "singularity": false, - "singularity+conda": false - }, - "standardized": true, - "stargazers_count": 3, - "subscribers_count": 1, - "topics": [ - "bioinformatics-pipeline", - "damid", - "snakemake-workflow" - ], - "updated_at": 1725892884.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "A model of the Illinois electricity system built with PyPSA.", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpbse_9wfv/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "ucsusa/pypsa-illinois", - "latest_release": null, - "linting": "ModuleNotFoundError in file /tmp/tmpbse_9wfv/Snakefile, line 5:\nNo module named \u0027dotenv\u0027\n File \"/tmp/tmpbse_9wfv/Snakefile\", line 5, in \u003cmodule\u003e\n", - "mandatory_flags": 
[], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 3, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726083747.0 - }, { "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch1 class=\"heading-element\"\u003eConfiguration options\u003c/h1\u003e\u003ca id=\"user-content-configuration-options\" class=\"anchor\" aria-label=\"Permalink: Configuration options\" href=\"#configuration-options\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003edefrabb uses two configuration files\u003c/p\u003e\n\u003cp\u003eSee \u003ccode\u003eschema/analyses-schema.yml\u003c/code\u003e and \u003ccode\u003eschema/resources-schema.yml\u003c/code\u003e for detailed descriptions and field formats requirements.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eresource.yaml\u003c/h2\u003e\u003ca id=\"user-content-resourceyaml\" class=\"anchor\" aria-label=\"Permalink: resource.yaml\" href=\"#resourceyaml\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eused to define:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eparameters, threads, and memory for compute intensive steps\u003c/li\u003e\n\u003cli\u003eurls for remote files: diploid assemblies, genome reference files, stratifications, and callsets used to evaluate draft benchmark\u003c/li\u003e\n\u003cli\u003eexclusion sets and how they are applied\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eAnalyses Tables\u003c/h2\u003e\u003ca id=\"user-content-analyses-tables\" class=\"anchor\" aria-label=\"Permalink: Analyses Tables\" href=\"#analyses-tables\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eProvides run specific configurations\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003einput diploid assembly\u003c/li\u003e\n\u003cli\u003eversion of reference genome\u003c/li\u003e\n\u003cli\u003eassembly-based variant caller and parameters\u003c/li\u003e\n\u003cli\u003evcf and bed processing including what exclusions to use\u003c/li\u003e\n\u003cli\u003ebenchmarking method and comparison callset used for initial evaluation\u003c/li\u003e\n\u003c/ul\u003e\n", "data_format": 2, @@ -48978,6 +48978,40 @@ var data = "topics": [], "updated_at": 1616871931.0 }, + { + "config_readme": null, + "data_format": 2, + "description": "Snakemake workflow for the preprocessing, alignment, QC, and quantification of spatial transcriptomics data", + "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmpnbreyozq/rules/6a_scanpy_init.smk\": Keyword \"input\" at line 8 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpnbreyozq/rules/6a_scanpy_init.smk\": Keyword \"params\" at line 12 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpnbreyozq/rules/6a_scanpy_init.smk\": Keyword \"input\" at line 126 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] \n[DEBUG] \n[ERROR] In file 
\"/tmp/tmpnbreyozq/Snakefile\": InvalidPython: Black error:\n```\nCannot parse: 249:0: EOF in multi-line statement\n```\n\n[DEBUG] In file \"/tmp/tmpnbreyozq/Snakefile\": \n[DEBUG] In file \"/tmp/tmpnbreyozq/rules/0_utils.smk\": Formatted content is different from original\n[INFO] 1 file(s) raised parsing errors \ud83e\udd15\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n[INFO] 2 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", + "full_name": "mckellardw/slide_snake", + "latest_release": null, + "linting": "Using workflow specific profile profiles/default for setting default command line arguments.\nLints for snakefile /tmp/tmpnbreyozq/rules/0_utils.smk:\n * Mixed rules and functions in same snakefile.:\n Small one-liner functions used only once should be defined as lambda\n expressions. Other functions should be collected in a common module, e.g.\n \u0027rules/common.smk\u0027. This makes the workflow steps more readable.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/modularization.html#includes\n\nLints for snakefile /tmp/tmpnbreyozq/rules/0a_barcode_maps.smk:\n * Path composition with \u0027+\u0027 in line 7:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 75:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n\nLints for rule utils_index_BAM (line 5, /tmp/tmpnbreyozq/rules/0_utils.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule BC_copy_barcode_map (line 5, /tmp/tmpnbreyozq/rules/0a_barcode_maps.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. 
Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule BC_get_simple_whitelist (line 29, /tmp/tmpnbreyozq/rules/0a_barcode_maps.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule BC_write_whitelist_variants (line 47, /tmp/tmpnbreyozq/rules/0a_barcode_maps.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule BC_insert_adapter_into_list (line 139, /tmp/tmpnbreyozq/rules/0a_barcode_maps.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ilmn_1a_merge_fastqs (line 2, /tmp/tmpnbreyozq/rules/short_read/1a_mergefqs.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ilmn_1b_R1_hardTrimming (line 136, /tmp/tmpnbreyozq/rules/short_read/1b_trimming.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_1b_R1_internalTrimming (line 170, /tmp/tmpnbreyozq/rules/short_read/1b_trimming.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_1c_fastQC_preTrim (line 5, /tmp/tmpnbreyozq/rules/short_read/1c_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_1c_fastQC_postTrim (line 30, /tmp/tmpnbreyozq/rules/short_read/1c_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_1c_fastQC_twiceTrim (line 54, /tmp/tmpnbreyozq/rules/short_read/1c_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_2a_bwa_rRNA_filter_R1 (line 60, /tmp/tmpnbreyozq/rules/short_read/2a_rRNA_bwa.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2a_bwa_rRNA_compress_unmapped (line 82, /tmp/tmpnbreyozq/rules/short_read/2a_rRNA_bwa.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2a_bwa_rRNA_filtered_fastqc (line 99, /tmp/tmpnbreyozq/rules/short_read/2a_rRNA_bwa.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_2b_STAR_rRNA_align (line 7, /tmp/tmpnbreyozq/rules/short_read/2b_rRNA_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ilmn_2b_STAR_rRNA_compress_outs (line 63, /tmp/tmpnbreyozq/rules/short_read/2b_rRNA_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n * Param GENEDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule ilmn_2b_STAR_rRNA_rename_compress_unmapped (line 99, /tmp/tmpnbreyozq/rules/short_read/2b_rRNA_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ilmn_2b_STAR_rRNA_filtered_fastqc (line 122, /tmp/tmpnbreyozq/rules/short_read/2b_rRNA_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_2c_qualimapQC_rRNA_STAR (line 37, /tmp/tmpnbreyozq/rules/short_read/2c_rRNA_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_2c_qualimap_summary2csv_rRNA_STAR (line 66, /tmp/tmpnbreyozq/rules/short_read/2c_rRNA_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_get_noRibo_list (line 40, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_gunzip_R1 (line 54, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_filter_R1 (line 66, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_filter_R1_internalTrim (line 82, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_filter_R1_hardTrim (line 98, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_compress_fqs (line 114, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_3a_STARsolo_align (line 9, /tmp/tmpnbreyozq/rules/short_read/3a_star_align.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ilmn_3a_compress_STAR_outs (line 68, /tmp/tmpnbreyozq/rules/short_read/3a_star_align.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n * Param VELDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param GENEDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. 
Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param GENEFULLDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule ilmn_3b_fastqc_unmapped (line 7, /tmp/tmpnbreyozq/rules/short_read/3b_star_unmapped.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_3c_strand_split_dedup_bam (line 30, /tmp/tmpnbreyozq/rules/short_read/3c_star_dedup.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_3d_qualimapQC_STAR (line 3, /tmp/tmpnbreyozq/rules/short_read/3d_star_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_3d_qualimap_summary2csv_STAR (line 31, /tmp/tmpnbreyozq/rules/short_read/3d_star_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_4a_kbpython_std_remove_suffix (line 65, /tmp/tmpnbreyozq/rules/short_read/4a_kbpython.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule kbpython_std_compress_outs (line 81, /tmp/tmpnbreyozq/rules/short_read/4a_kbpython.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_merge_formats (line 5, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_readIDs_by_adapter_type (line 83, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_adapter_scan_results (line 107, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_merge_scan_lists (line 129, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_subset_fastq_by_adapter_type (line 146, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_compress_merged_fq (line 175, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1b_R1_hardTrimming (line 78, /tmp/tmpnbreyozq/rules/ont/1b_trimming.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1b_R1_internalTrim (line 108, /tmp/tmpnbreyozq/rules/ont/1b_trimming.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1c_fastq_call_bc_from_adapter (line 3, /tmp/tmpnbreyozq/rules/ont/1c_barcode_calling.smk):\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ont_1c_filter_read_barcodes (line 64, /tmp/tmpnbreyozq/rules/ont/1c_barcode_calling.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1c_tsv_bc_correction (line 79, /tmp/tmpnbreyozq/rules/ont/1c_barcode_calling.smk):\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. 
Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ont_1d_sort_compress_output (line 44, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1d_add_featureCounts_to_bam (line 136, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1d_add_corrected_barcodes (line 165, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1d_add_umis (line 193, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1d_filter_bam_empty_tags (line 221, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1d_counts_to_sparse (line 277, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_clipBeforeSTAR (line 4, /tmp/tmpnbreyozq/rules/ont/1d_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_STARsolo_align (line 27, /tmp/tmpnbreyozq/rules/ont/1d_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ont_compress_STAR_outs (line 119, /tmp/tmpnbreyozq/rules/ont/1d_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. 
Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n * Param VELDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param GENEFULLDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule ont_2_qualimap_minimap2 (line 2, /tmp/tmpnbreyozq/rules/ont/2_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_2_qualimap_STAR (line 31, /tmp/tmpnbreyozq/rules/ont/2_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_2_qualimap_summary2csv (line 60, /tmp/tmpnbreyozq/rules/ont/2_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_readQC_0_rawInput (line 2, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_readQC_1_preCutadapt (line 26, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_readQC_2_postCutadapt (line 50, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_readQC_3_bam (line 89, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule readQC_downsample (line 116, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_readQC_summaryplot (line 133, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_fastQC_preTrim (line 2, /tmp/tmpnbreyozq/rules/ont/2_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_fastQC_preCutadapt (line 25, /tmp/tmpnbreyozq/rules/ont/2_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_fastQC_postCutadapt (line 48, /tmp/tmpnbreyozq/rules/ont/2_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 4, + "subscribers_count": 3, + "topics": [], + "updated_at": 1726069701.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Knowledge Graph generator for WorkflowHub", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmph7kt2vg4/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "workflowhub-eu/workflowhub-graph", + "latest_release": null, + "linting": "Lints for rule source_ro_crates (line 12, /tmp/tmph7kt2vg4/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable OUTPUT_DIRS from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable VERSIONS from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable OUTPUT_DIRS from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable VERSIONS from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable OUTPUT_DIRS from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule report_created_files (line 35, /tmp/tmph7kt2vg4/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule merge_files (line 44, /tmp/tmph7kt2vg4/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule create_ro_crate (line 70, /tmp/tmph7kt2vg4/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 4, + "subscribers_count": 9, + "topics": [], + "updated_at": 1726052509.0 + }, { "config_readme": null, "data_format": 2, @@ -49223,40 +49257,6 @@ var data = "topics": [], "updated_at": 1725438827.0 }, - { - "config_readme": null, - "data_format": 2, - "description": "Knowledge Graph generator for WorkflowHub", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmph7kt2vg4/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "workflowhub-eu/workflowhub-graph", - "latest_release": null, - "linting": "Lints for rule source_ro_crates (line 12, /tmp/tmph7kt2vg4/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable OUTPUT_DIRS from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. 
Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable VERSIONS from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable OUTPUT_DIRS from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable VERSIONS from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable OUTPUT_DIRS from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule report_created_files (line 35, /tmp/tmph7kt2vg4/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule merge_files (line 44, /tmp/tmph7kt2vg4/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. 
Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule create_ro_crate (line 70, /tmp/tmph7kt2vg4/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 4, - "subscribers_count": 9, - "topics": [], - "updated_at": 1726052509.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "Snakemake workflow for the preprocessing, alignment, QC, and quantification of spatial transcriptomics data", - "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmpnbreyozq/rules/6a_scanpy_init.smk\": Keyword \"input\" at line 8 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpnbreyozq/rules/6a_scanpy_init.smk\": Keyword \"params\" at line 12 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpnbreyozq/rules/6a_scanpy_init.smk\": Keyword \"input\" at line 126 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] \n[DEBUG] \n[ERROR] In file \"/tmp/tmpnbreyozq/Snakefile\": InvalidPython: Black error:\n```\nCannot parse: 249:0: EOF in multi-line statement\n```\n\n[DEBUG] In file \"/tmp/tmpnbreyozq/Snakefile\": \n[DEBUG] In file \"/tmp/tmpnbreyozq/rules/0_utils.smk\": Formatted content is different from original\n[INFO] 1 file(s) raised parsing errors \ud83e\udd15\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n[INFO] 2 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", - "full_name": "mckellardw/slide_snake", - "latest_release": null, - "linting": "Using workflow specific profile profiles/default for setting default command line arguments.\nLints for snakefile /tmp/tmpnbreyozq/rules/0_utils.smk:\n * Mixed rules and functions in same snakefile.:\n Small one-liner functions used only once should be defined as lambda\n expressions. Other functions should be collected in a common module, e.g.\n \u0027rules/common.smk\u0027. 
This makes the workflow steps more readable.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/modularization.html#includes\n\nLints for snakefile /tmp/tmpnbreyozq/rules/0a_barcode_maps.smk:\n * Path composition with \u0027+\u0027 in line 7:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 75:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n\nLints for rule utils_index_BAM (line 5, /tmp/tmpnbreyozq/rules/0_utils.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule BC_copy_barcode_map (line 5, /tmp/tmpnbreyozq/rules/0a_barcode_maps.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule BC_get_simple_whitelist (line 29, /tmp/tmpnbreyozq/rules/0a_barcode_maps.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule BC_write_whitelist_variants (line 47, /tmp/tmpnbreyozq/rules/0a_barcode_maps.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule BC_insert_adapter_into_list (line 139, /tmp/tmpnbreyozq/rules/0a_barcode_maps.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ilmn_1a_merge_fastqs (line 2, /tmp/tmpnbreyozq/rules/short_read/1a_mergefqs.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. 
Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ilmn_1b_R1_hardTrimming (line 136, /tmp/tmpnbreyozq/rules/short_read/1b_trimming.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_1b_R1_internalTrimming (line 170, /tmp/tmpnbreyozq/rules/short_read/1b_trimming.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_1c_fastQC_preTrim (line 5, /tmp/tmpnbreyozq/rules/short_read/1c_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_1c_fastQC_postTrim (line 30, /tmp/tmpnbreyozq/rules/short_read/1c_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_1c_fastQC_twiceTrim (line 54, /tmp/tmpnbreyozq/rules/short_read/1c_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_2a_bwa_rRNA_filter_R1 (line 60, /tmp/tmpnbreyozq/rules/short_read/2a_rRNA_bwa.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2a_bwa_rRNA_compress_unmapped (line 82, /tmp/tmpnbreyozq/rules/short_read/2a_rRNA_bwa.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2a_bwa_rRNA_filtered_fastqc (line 99, /tmp/tmpnbreyozq/rules/short_read/2a_rRNA_bwa.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_2b_STAR_rRNA_align (line 7, /tmp/tmpnbreyozq/rules/short_read/2b_rRNA_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ilmn_2b_STAR_rRNA_compress_outs (line 63, /tmp/tmpnbreyozq/rules/short_read/2b_rRNA_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n * Param GENEDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule ilmn_2b_STAR_rRNA_rename_compress_unmapped (line 99, /tmp/tmpnbreyozq/rules/short_read/2b_rRNA_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ilmn_2b_STAR_rRNA_filtered_fastqc (line 122, /tmp/tmpnbreyozq/rules/short_read/2b_rRNA_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_2c_qualimapQC_rRNA_STAR (line 37, /tmp/tmpnbreyozq/rules/short_read/2c_rRNA_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_2c_qualimap_summary2csv_rRNA_STAR (line 66, /tmp/tmpnbreyozq/rules/short_read/2c_rRNA_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_get_noRibo_list (line 40, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_gunzip_R1 (line 54, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_filter_R1 (line 66, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_filter_R1_internalTrim (line 82, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_filter_R1_hardTrim (line 98, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_2d_ribodetector_compress_fqs (line 114, /tmp/tmpnbreyozq/rules/short_read/2d_ribodetector.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_3a_STARsolo_align (line 9, /tmp/tmpnbreyozq/rules/short_read/3a_star_align.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ilmn_3a_compress_STAR_outs (line 68, /tmp/tmpnbreyozq/rules/short_read/3a_star_align.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n * Param VELDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param GENEDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. 
Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param GENEFULLDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule ilmn_3b_fastqc_unmapped (line 7, /tmp/tmpnbreyozq/rules/short_read/3b_star_unmapped.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_3c_strand_split_dedup_bam (line 30, /tmp/tmpnbreyozq/rules/short_read/3c_star_dedup.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_3d_qualimapQC_STAR (line 3, /tmp/tmpnbreyozq/rules/short_read/3d_star_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ilmn_3d_qualimap_summary2csv_STAR (line 31, /tmp/tmpnbreyozq/rules/short_read/3d_star_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ilmn_4a_kbpython_std_remove_suffix (line 65, /tmp/tmpnbreyozq/rules/short_read/4a_kbpython.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule kbpython_std_compress_outs (line 81, /tmp/tmpnbreyozq/rules/short_read/4a_kbpython.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_merge_formats (line 5, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_readIDs_by_adapter_type (line 83, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_adapter_scan_results (line 107, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_merge_scan_lists (line 129, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_subset_fastq_by_adapter_type (line 146, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1a_compress_merged_fq (line 175, /tmp/tmpnbreyozq/rules/ont/1a_preprocessing.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1b_R1_hardTrimming (line 78, /tmp/tmpnbreyozq/rules/ont/1b_trimming.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1b_R1_internalTrim (line 108, /tmp/tmpnbreyozq/rules/ont/1b_trimming.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1c_fastq_call_bc_from_adapter (line 3, /tmp/tmpnbreyozq/rules/ont/1c_barcode_calling.smk):\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ont_1c_filter_read_barcodes (line 64, /tmp/tmpnbreyozq/rules/ont/1c_barcode_calling.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1c_tsv_bc_correction (line 79, /tmp/tmpnbreyozq/rules/ont/1c_barcode_calling.smk):\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. 
Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ont_1d_sort_compress_output (line 44, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1d_add_featureCounts_to_bam (line 136, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1d_add_corrected_barcodes (line 165, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1d_add_umis (line 193, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1d_filter_bam_empty_tags (line 221, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_1d_counts_to_sparse (line 277, /tmp/tmpnbreyozq/rules/ont/1d_minimap2.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_clipBeforeSTAR (line 4, /tmp/tmpnbreyozq/rules/ont/1d_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_STARsolo_align (line 27, /tmp/tmpnbreyozq/rules/ont/1d_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n\nLints for rule ont_compress_STAR_outs (line 119, /tmp/tmpnbreyozq/rules/ont/1d_STAR.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Migrate long run directives into scripts or notebooks:\n Long run directives hamper workflow readability. Use the script or\n notebook directive instead. Note that the script or notebook directive\n does not involve boilerplate. 
Similar to run, you will have direct access\n to params, input, output, and wildcards.Only use the run directive for a\n handful of lines.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#external-scripts\n https://snakemake.readthedocs.io/en/latest/snakefiles/rules.html#jupyter-notebook-integration\n * Param VELDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n * Param GENEFULLDIR is a prefix of input or output file but hardcoded:\n If this is meant to represent a file path prefix, it will fail when\n running workflow in environments without a shared filesystem. Instead,\n provide a function that infers the appropriate prefix from the input or\n output file, e.g.: lambda w, input: os.path.splitext(input[0])[0]\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n https://snakemake.readthedocs.io/en/stable/tutorial/advanced.html#tutorial-input-functions\n\nLints for rule ont_2_qualimap_minimap2 (line 2, /tmp/tmpnbreyozq/rules/ont/2_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_2_qualimap_STAR (line 31, /tmp/tmpnbreyozq/rules/ont/2_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_2_qualimap_summary2csv (line 60, /tmp/tmpnbreyozq/rules/ont/2_qualimap.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_readQC_0_rawInput (line 2, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_readQC_1_preCutadapt (line 26, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_readQC_2_postCutadapt (line 50, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_readQC_3_bam (line 89, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule readQC_downsample (line 116, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule ont_readQC_summaryplot (line 133, /tmp/tmpnbreyozq/rules/ont/2_read_qc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_fastQC_preTrim (line 2, /tmp/tmpnbreyozq/rules/ont/2_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_fastQC_preCutadapt (line 25, /tmp/tmpnbreyozq/rules/ont/2_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule ont_fastQC_postCutadapt (line 48, /tmp/tmpnbreyozq/rules/ont/2_fastqc.smk):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 4, - "subscribers_count": 3, - "topics": [], - "updated_at": 1726069701.0 - }, { "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch1 class=\"heading-element\"\u003eGeneral Settings\u003c/h1\u003e\u003ca id=\"user-content-general-settings\" class=\"anchor\" aria-label=\"Permalink: General Settings\" href=\"#general-settings\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eIn the \u003ca href=\"./\"\u003e\u003ccode\u003econfig\u003c/code\u003e\u003c/a\u003e directory, you will find template configuration files\nfor this pipeline. 
For your run, you\u0027ll need to edit \u003ccode\u003esamples.tsv\u003c/code\u003e,\n\u003ccode\u003eunits.tsv\u003c/code\u003e, and \u003ccode\u003econfig.yaml\u003c/code\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eSamples list\u003c/h2\u003e\u003ca id=\"user-content-samples-list\" class=\"anchor\" aria-label=\"Permalink: Samples list\" href=\"#samples-list\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eAll your samples should be listed in \u003ccode\u003esamples.tsv\u003c/code\u003e. Samples preceded with a \u003ccode\u003e#\u003c/code\u003e\nwill not be included; this can be useful if you want to exclude a sample after\nquality checking.\u003c/p\u003e\n\u003cp\u003eEach sample must have four columns filled in the sample sheet. The columns are\ntab separated:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003esample\u003c/code\u003e - The name of the sample\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003epopulation\u003c/code\u003e - The populations you will group your samples into. These are\nthe groups that population level stats are calculated on.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003etime\u003c/code\u003e - This should be either \u003ccode\u003emodern\u003c/code\u003e or \u003ccode\u003ehistorical\u003c/code\u003e; the only thing this\nwill affect is whether or not your bam files will be corrected for\npost-mortem damage.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003edepth\u003c/code\u003e - This is only used for depth filtering. Extreme levels (both high\nand low) will be calculated for each group you put here as well as the\ndataset as a whole, and all will be filtered out for all analyses. Any string\ncan be used for this. If all samples are sequenced to roughly similar depths,\nall can have the same value. If some are low coverage, and some higher,\nsimply using \u0027low\u0027 and \u0027high\u0027 on the corresponding samples is sufficient.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eThe values in the sample list will end up in filenames, so ensure that they are\nonly characters permitted in filenames on your system.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eUnits list\u003c/h2\u003e\u003ca id=\"user-content-units-list\" class=\"anchor\" aria-label=\"Permalink: Units list\" href=\"#units-list\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eAll your raw data will be pointed to in \u003ccode\u003eunits.tsv\u003c/code\u003e.\u003c/p\u003e\n\u003cp\u003eEach sample must have five columns filled in the units sheet. The columns are\ntab separated:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003esample\u003c/code\u003e - The sample name, same as in \u003ccode\u003esamples.tsv\u003c/code\u003e.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eunit\u003c/code\u003e - This is used to fill out the \u003ccode\u003eID\u003c/code\u003e read group in the bam file. It\nmust be unique to each read group, so the same sample shouldn\u0027t have the\nsame unit for more than one sequencing run. A good format might be\n\u003ccode\u003esequencer_barcode.lane\u003c/code\u003e. 
Optical duplicates will be removed within units.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003elib\u003c/code\u003e - This is used to fill out the \u003ccode\u003eLB\u003c/code\u003e read group. This should be a unique\nidentifier for each sample library. Sequencing runs from the same library,\nbut different runs, will have the same value in \u003ccode\u003elib\u003c/code\u003e, but different in\n\u003ccode\u003eunit\u003c/code\u003e.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eplatform\u003c/code\u003e - This is used to fill out the \u003ccode\u003ePL\u003c/code\u003e read group. Put what you\u0027d\nwant there. Usually \u003ccode\u003eILLUMINA\u003c/code\u003e for Illumina platforms.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003efq1\u003c/code\u003e and \u003ccode\u003efq2\u003c/code\u003e - The absolute or relative paths from the working directory\nto the raw fastq files for the sample. Currently the pipeline only supports\npaired-end sequencing, single end may be added down the line.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eConfiguration file\u003c/h2\u003e\u003ca id=\"user-content-configuration-file\" class=\"anchor\" aria-label=\"Permalink: Configuration file\" href=\"#configuration-file\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003e\u003ccode\u003econfig.yaml\u003c/code\u003e contains the configuration for the workflow, this is where you\nwill put what analyses, filters, and options you want. Below I describe the\nconfiguration options. The \u003ca href=\"config.yaml\"\u003e\u003ccode\u003econfig.yaml\u003c/code\u003e\u003c/a\u003e in this repository\nserves as a template, but includes some \u0027default\u0027 parameters that may be good\nstarting points for some users. 
If \u003ccode\u003e--configfile\u003c/code\u003e is not specified in the\nsnakemake command, the workflow will default to \u003ccode\u003econfig/config.yaml\u003c/code\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eConfiguration options\u003c/h3\u003e\u003ca id=\"user-content-configuration-options\" class=\"anchor\" aria-label=\"Permalink: Configuration options\" href=\"#configuration-options\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eDataset Configuration\u003c/h4\u003e\u003ca id=\"user-content-dataset-configuration\" class=\"anchor\" aria-label=\"Permalink: Dataset Configuration\" href=\"#dataset-configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eRequired configuration of the \u0027dataset\u0027.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003esamples:\u003c/code\u003e An absolute or relative path from the working directory to the\n\u003ccode\u003esamples.tsv\u003c/code\u003e file.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eunits:\u003c/code\u003e An absolute or relative path from the working directory to the\n\u003ccode\u003eunits.tsv\u003c/code\u003e file.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003edataset:\u003c/code\u003e A name for this dataset run - essentially, an identifier for a\nbatch of samples to be analysed together with the same configuration.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eHere, dataset means a set of samples and configurations that the workflow will\nbe run with. Each dataset should have its own \u003ccode\u003esamples.tsv\u003c/code\u003e and \u003ccode\u003econfig.yaml\u003c/code\u003e,\nbut the same \u003ccode\u003eunits.tsv\u003c/code\u003e can be used for multiple if you prefer. Essentially,\nwhat the dataset identifier does is keep your outputs organized into projects,\nso that the same BAM files can be used in multiple datasets without having to\nbe remade.\u003c/p\u003e\n\u003cp\u003eSo, say you have \u003ccode\u003edataset1_samples.tsv\u003c/code\u003e and \u003ccode\u003edataset2_samples.tsv\u003c/code\u003e, with\ncorresponding \u003ccode\u003edataset1_config.yaml\u003c/code\u003e and \u003ccode\u003edataset2_config.yaml\u003c/code\u003e. The sample\nfiles contain different samples, though some are shared between the datasets.\nThe workflow for dataset1 can be run, and then dataset2 can be run. When\ndataset2 runs, it will map new samples, but won\u0027t re-map samples processed in\ndataset1. Each will perform downstream analyses independently with their sample\nset and configuration files, storing these results in dataset specific folders.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eReference Configuration\u003c/h4\u003e\u003ca id=\"user-content-reference-configuration\" class=\"anchor\" aria-label=\"Permalink: Reference Configuration\" href=\"#reference-configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eRequired configuration of the reference.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003e\u003ccode\u003echunk_size:\u003c/code\u003e A size in bp (integer). 
Your reference will be analyzed in\n\u0027chunks\u0027 of contigs of this size to parallelize processing. This size should\nbe larger than the largest contig in your genome. A larger number means fewer\njobs that run longer. A smaller number means more jobs that run shorter. The\nbest fit will depend on the reference and the compute resources you have\navailable. Leaving this blank will not divide the reference up into chunks\n(but this isn\u0027t optimized yet, so it will do a couple unnecessary steps).\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003ccode\u003ereference:\u003c/code\u003e\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003ename:\u003c/code\u003e A name for your reference genome, will go in the file names.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003efasta:\u003c/code\u003e A path to the reference fasta file (currently only supports\nuncompressed fasta files)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003emito:\u003c/code\u003e Mitochondrial contig name(s), will be removed from analysis. Should\nbe listed within brackets []\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003esex-linked:\u003c/code\u003e Sex-linked contig name(s), will be removed from analysis.\nShould be listed within brackets []\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eexclude:\u003c/code\u003e Additional contig name(s) to exclude from analysis. Should be\nlisted within brackets []\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003emin_size:\u003c/code\u003e A size in bp (integer). All contigs below this size will be\nexcluded from analysis.\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eReference genomes should be uncompressed, and contig names should be clear and\nconcise. Currently, there are some issues parsing contig names with\nunderscores, so please change these in your reference before running the\npipeline. Alphanumeric characters, as well as \u003ccode\u003e.\u003c/code\u003e in contig names have been\ntested to work so far, other symbols have not been tested.\u003c/p\u003e\n\u003cp\u003ePotentially the ability to use bgzipped genomes will be added, I just need to\ncheck that it works with all underlying tools. Currently, it will for sure not\nwork, as calculating chunks is hard-coded to work on an uncompressed genome.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eSample Set Configuration\u003c/h4\u003e\u003ca id=\"user-content-sample-set-configuration\" class=\"anchor\" aria-label=\"Permalink: Sample Set Configuration\" href=\"#sample-set-configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eThis will exclude individuals from analysis that are listed in the sample list.\nThis may be useful if you run the workflow and find a poor quality sample, and\nwant to re-run without it. Or if you have relatives in the dataset and you want\nto exclude them where necessary.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003eexclude_ind:\u003c/code\u003e Sample name(s) that will be excluded from the workflow. Should\nbe a list in [].\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eexcl_pca-admix:\u003c/code\u003e Sample name(s) that will be excluded \u003cem\u003eonly\u003c/em\u003e from PCA and\nAdmixture analyses. Useful for close relatives that violate the assumptions\nof these analyses, but that you want in others. 
Should be a list in [].\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eAnalysis Selection\u003c/h4\u003e\u003ca id=\"user-content-analysis-selection\" class=\"anchor\" aria-label=\"Permalink: Analysis Selection\" href=\"#analysis-selection\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eHere, you will define which analyses you will perform. It is useful to start\nwith only a few, and add more in subsequent workflow runs, just to ensure you\ncatch errors before you use compute time running all analyses. Most are set\nwith (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e) or a value, described below. Modifications to the\nsettings for each analysis are set in the next section.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003e\u003ccode\u003epopulations:\u003c/code\u003e A list of populations found in your sample list to limit\npopulation analyses to. Might be useful if you want to perform individual\nanalyses on some samples but not include them in any population level\nanalyses\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003ccode\u003eanalyses:\u003c/code\u003e\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003egenmap:\u003c/code\u003e Filter out sites with low mappability estimated by Genmap\n(\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003erepeatmasker:\u003c/code\u003e (NOTE: Only one of the three options should be filled/true)\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003elocal_lib:\u003c/code\u003e Filter repeats by masking with an already made library you\nhave locally. Should be file path.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003edfam_lib:\u003c/code\u003e Filter repeats using a library available from dfam. Should be\na taxonomic group name.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003ebuild_lib:\u003c/code\u003e Use RepeatModeler to build a library of repeats from the\nreference itself, then filter them from analysis (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e).\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eextreme_depth:\u003c/code\u003e Filter out sites with extremely high or low global\nsequencing depth (\u003ccode\u003e[lower, upper]\u003c/code\u003e). The value of \u003ccode\u003elower\u003c/code\u003e (float) will be\nmultiplied by the median global depth to create a lower global depth\nthreshold, and \u003ccode\u003eupper\u003c/code\u003e will do the same to create an upper threshold. This\nis done for all samples, as well as separately for depth groupings defined\nin samples file.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003edataset_missing_data:\u003c/code\u003e A floating point value between 0 and 1. Sites with\ndata for fewer than this proportion of individuals across the whole dataset\nwill be filtered out.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003epopulation_missing_data:\u003c/code\u003e A floating point value between 0 and 1. 
Sites\nwith data for fewer than this proportion of individuals in any population\nwill be filtered out for all populations.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003equalimap:\u003c/code\u003e Perform Qualimap bamqc on bam files for general quality stats\n(\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003edamageprofiler:\u003c/code\u003e Estimate post-mortem DNA damage on historical samples\nwith Damageprofiler (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e) NOTE: This just adds the addition of\nDamageprofiler to the already default output of MapDamage. DNA damage will\nalways be estimated and rescaled by MapDamage for samples marked as\n\u0027historical\u0027\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eestimate_ld:\u003c/code\u003e Estimate pairwise linkage disequilibrium between sites with\nngsLD for each population and the whole dataset. Note, only set this if\nyou want to generate the LD estimates for use in downstream analyses\noutside this workflow. Other analyses within this workflow that require LD\nestimates (LD decay/pruning) will function properly regardless of the\nsetting here. (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eld_decay:\u003c/code\u003e Use ngsLD to plot LD decay curves for each population and for\nthe dataset as a whole (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003epca_pcangsd:\u003c/code\u003e Perform Principal Component Analysis with PCAngsd\n(\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eadmix_ngsadmix:\u003c/code\u003e Perform admixture analysis with NGSadmix (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003erelatedness:\u003c/code\u003e Can be performed multiple ways, set any combination of the\nthree options below. Note that I\u0027ve mostly incorporated these with the\nR0/R1/KING kinship methods in Waples et al. 2019, \u003cem\u003eMol. Ecol.\u003c/em\u003e in mind.\nThese methods differ slightly from how they implement this method, and will\ngive slightly more/less accurate estimates of kinship depending on your\nreference\u0027s relationship to your samples. \u003ccode\u003eibsrelate_ibs\u003c/code\u003e uses the\nprobabilities of all possible genotypes, so should be the most accurate\nregardless, but can use a lot of memory and take a long time with many\nsamples. \u003ccode\u003eibsrelate_sfs\u003c/code\u003e is a bit more efficient, as it does things in a\npairwise fashion in parallel, but may be biased if the segregating alleles\nin your populations are not represented in the reference. \u003ccode\u003engsrelate\u003c/code\u003e uses\nseveral methods, one of which is similar to \u003ccode\u003eibsrelate_sfs\u003c/code\u003e, but may be\nless accurate due to incorporating less data. 
In my experience,\nNGSrelate is suitable to identify up to third degree relatives in the\ndataset, but only if the exact relationship can be somewhat uncertain (i.e.\nyou don\u0027t need to know the difference between, say, parent/offspring and\nfull sibling pairs, or between second degree and third degree relatives).\nIBSrelate_sfs can get you greater accuracy, but may erroneously inflate\nkinship if your dataset has many alleles not represented in your reference.\nIf you notice, for instance, a large number of third degree relatives\n(KING ~0.03 - 0.07) in your dataset that is not expected, it may be worth\ntrying the IBS based method (\u003ccode\u003eibsrelate_ibs\u003c/code\u003e).\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003engsrelate:\u003c/code\u003e Co-estimate inbreeding and pairwise relatedness with\nNGSrelate (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eibsrelate_ibs:\u003c/code\u003e Estimate pairwise relatedness with the IBS based method\nfrom Waples et al. 2019, \u003cem\u003eMol. Ecol.\u003c/em\u003e. This can use a lot of memory, as\nit has genotype likelihoods for all sites from all samples loaded into\nmemory, so it is done per \u0027chunk\u0027, which still takes a lot of time and\nmemory. (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eibsrelate_sfs:\u003c/code\u003e Estimate pairwise relatedness with the SFS based method\nfrom Waples et al. 2019, \u003cem\u003eMol. Ecol.\u003c/em\u003e. Enabling this can greatly increase\nthe time needed to build the workflow DAG if you have many samples. As a\nform of this method is implemented in NGSrelate, it may be more\nefficient to only enable that. (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003ethetas_angsd:\u003c/code\u003e Estimate pi, theta, and Tajima\u0027s D for each population in\nwindows across the genome using ANGSD (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eheterozygosity_angsd:\u003c/code\u003e Estimate individual genome-wide heterozygosity\nusing ANGSD (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003efst_angsd:\u003c/code\u003e Estimate pairwise $F_{ST}$ using ANGSD. Set one or both of the\nbelow options. Estimates both globally and in windows across the genome.\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003epopulations:\u003c/code\u003e Pairwise $F_{ST}$ is calculated between all possible\npopulation pairs (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eindividuals:\u003c/code\u003e Pairwise $F_{ST}$ is calculated between all possible\nindividual pairs. NOTE: This can be really intensive on the DAG building\nprocess, so I don\u0027t recommend enabling unless you\u0027re certain you want\nthis (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003einbreeding_ngsf-hmm:\u003c/code\u003e Estimates inbreeding coefficients and runs of\nhomozygosity using ngsF-HMM. 
Output is converted into an inbreeding measure\n$F_ROH$, which describes the proportion of the genome in runs of\nhomozygosity over a certain length. (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eibs_matrix:\u003c/code\u003e Estimate pairwise identity by state distance between all\nsamples using ANGSD. (\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eFilter Sets\u003c/h4\u003e\u003ca id=\"user-content-filter-sets\" class=\"anchor\" aria-label=\"Permalink: Filter Sets\" href=\"#filter-sets\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eBy default, this workflow will perform all analyses requested in the above\nsection on all sites that pass the filters set in the above section. These\noutputs will contain \u003ccode\u003eallsites-filts\u003c/code\u003e in the filename and in the report.\nHowever, many times, it is useful to perform an analysis on different subsets\nof sites, for instance, to compare results for genic vs. intergenic regions,\nneutral sites, exons vs. introns, etc. Here, users can set an arbitrary number\nof additional filters using BED files. For each BED file supplied, the contents\nwill be intersected with the sites passing the filters set in the above\nsection, and all analyses will be performed additionally using those sites.\u003c/p\u003e\n\u003cp\u003eFor instance, given a BED file containing putatively neutral sites, one could\nset the following:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-yaml\"\u003e\u003cpre\u003e\u003cspan class=\"pl-ent\"\u003efilter_beds\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eneutral-sites\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eresources/neutral_sites.bed\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eIn this case, for each requested analysis, in addition to the \u003ccode\u003eallsites-filts\u003c/code\u003e\noutput, a \u003ccode\u003eneutral-filts\u003c/code\u003e (named after the key assigned to the BED file in\n\u003ccode\u003econfig.yaml\u003c/code\u003e) output will also be generated, containing the results for sites\nwithin the specified BED file that passed any set filters.\u003c/p\u003e\n\u003cp\u003eMore than one BED file can be set, up to an arbitrary number:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-yaml\"\u003e\u003cpre\u003e\u003cspan class=\"pl-ent\"\u003efilter_beds\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eneutral\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eresources/neutral_sites.bed\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eintergenic\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eresources/intergenic_sites.bed\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eintrons\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eresources/introns.bed\u003cspan 
class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eIt may also sometimes be desireable to skip analyses on \u003ccode\u003eallsites-filts\u003c/code\u003e, say\nif you are trying to only generate diversity estimates or generate SFS for a\nset of neutral sites you supply. To skip running any analyses for\n\u003ccode\u003eallsites-filts\u003c/code\u003e and only perform them for the BED files you supply, you can\nset \u003ccode\u003eonly_filter_beds: true\u003c/code\u003e in the config file. This may also be useful in the\nevent you have a set of already filtered sites, and want to run the workflow on\nthose, ignoring any of the built in filter options by setting them to \u003ccode\u003efalse\u003c/code\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eSoftware Configuration\u003c/h4\u003e\u003ca id=\"user-content-software-configuration\" class=\"anchor\" aria-label=\"Permalink: Software Configuration\" href=\"#software-configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eThese are software specific settings that can be user configured in the\nworkflow. If you are missing a configurable setting you need, open up an issue\nor a pull request and I\u0027ll gladly put it in.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003e\u003ccode\u003emapQ:\u003c/code\u003e Phred-scaled mapping quality filter. Reads below this threshold will\nbe filtered out. (integer)\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003ccode\u003ebaseQ:\u003c/code\u003e Phred-scaled base quality filter. Reads below this threshold will be\nfiltered out. (integer)\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003ccode\u003eparams:\u003c/code\u003e\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003egenmap:\u003c/code\u003e Parameters for mappability analysis, see \u003ca href=\"https://github.com/cpockrandt/genmap/\"\u003eGenMap\u0027s documentation\u003c/a\u003e\nfor more details.\n\u003cul\u003e\n\u003cli\u003e\u003ccode\u003eK:\u003c/code\u003e\u003c/li\u003e\n\u003cli\u003e\u003ccode\u003eE:\u003c/code\u003e\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003emap_thresh:\u003c/code\u003e A threshold mappability score. Sites with a mappability\nscore below this threshold are filtered out if GenMap is enabled.\n(integer/float, 0-1)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003efastp:\u003c/code\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003eextra:\u003c/code\u003e Additional options to pass to fastp trimming. (string)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003epicard:\u003c/code\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003eMarkDuplicates:\u003c/code\u003e Additional options to pass to Picard MarkDuplicates.\n\u003ccode\u003e--REMOVE_DUPLICATES true\u003c/code\u003e is recommended. 
(string)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eangsd:\u003c/code\u003e General options in ANGSD, relevant doc pages are linked\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003egl_model:\u003c/code\u003e Genotype likelihood model to use in calculation\n(\u003ccode\u003e-GL\u003c/code\u003e option in ANGSD, \u003ca href=\"http://www.popgen.dk/angsd/index.php/Genotype_Likelihoods\" rel=\"nofollow\"\u003edocs\u003c/a\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003emaxdepth:\u003c/code\u003e When calculating individual depth, sites with depth higher\nthan this will be binned to this value. Should be fine for most to leave\nat \u003ccode\u003e1000\u003c/code\u003e. (integer, \u003ca href=\"http://www.popgen.dk/angsd/index.php/Depth\" rel=\"nofollow\"\u003edocs\u003c/a\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eextra:\u003c/code\u003e Additional options to pass to ANGSD during genotype likelihood\ncalculation. This is primarily useful for adding BAM input filters. Note\nthat \u003ccode\u003e--remove_bads\u003c/code\u003e and \u003ccode\u003e-only_proper_pairs\u003c/code\u003e are enabled by default, so\nthey only need to be included if you want to turn them off. I\u0027ve also\nfound that for some datasets, \u003ccode\u003e-C 50\u003c/code\u003e and \u003ccode\u003e-baq 1\u003c/code\u003e can create a strong\nrelationship between sample depth and detected diversity, effectively\nremoving the benefits of ANGSD for low/variable depth data. I recommend\nthat these aren\u0027t included unless you know you need them, and even then,\nI\u0027d recommend plotting \u003ccode\u003eheterozygosity ~ sample depth\u003c/code\u003e to ensure there is\nnot any relationship. Since the workflow uses bwa to map, \u003ccode\u003e-uniqueOnly 1\u003c/code\u003e\ndoesn\u0027t do anything if your minimum mapping quality is \u0026gt; 0. Don\u0027t put\nmapping and base quality thresholds here either; it will use the ones\ndefined above automatically. Although historical samples will have DNA\ndamage assessed and, to some extent, corrected, it may be useful to put\n\u003ccode\u003e-noTrans 1\u003c/code\u003e or \u003ccode\u003e-trim INT\u003c/code\u003e here if you\u0027re interested in stricter filters\nfor degraded DNA. 
(string, \u003ca href=\"http://www.popgen.dk/angsd/index.php/Input#BAM.2FCRAM\" rel=\"nofollow\"\u003edocs\u003c/a\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003esnp_pval:\u003c/code\u003e The p-value to use for calling SNPs (float, \u003ca href=\"http://www.popgen.dk/angsd/index.php/SNP_calling\" rel=\"nofollow\"\u003edocs\u003c/a\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003emin_maf:\u003c/code\u003e The minimum minor allele frequency required to call a SNP.\n(float, \u003ca href=\"http://www.popgen.dk/angsd/index.php/Allele_Frequencies\" rel=\"nofollow\"\u003edocs\u003c/a\u003e)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003engsld:\u003c/code\u003e Settings for ngsLD (\u003ca href=\"https://github.com/fgvieira/ngsLD\"\u003edocs\u003c/a\u003e)\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003emax_kb_dist_est-ld:\u003c/code\u003e For the LD estimates generated when setting\n\u003ccode\u003eestimate_ld: true\u003c/code\u003e above, set the maximum distance between sites in kb\nthat LD will be estimated for (\u003ccode\u003e--max_kb_dist\u003c/code\u003e in ngsLD, integer)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003emax_kb_dist_decay:\u003c/code\u003e The same as \u003ccode\u003emax_kb_dist_est-ld:\u003c/code\u003e, but used when\nestimating LD decay when setting \u003ccode\u003eld_decay: true\u003c/code\u003e above (integer)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003emax_kb_dist_pruning:\u003c/code\u003e The same as \u003ccode\u003emax_kb_dist_est-ld:\u003c/code\u003e, but used when\nlinkage pruning SNPs as inputs for PCA, Admix, and Inbreeding analyses.\nAny positions above this distance will be assumed to be in linkage\nequilibrium during the pruning process (integer)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003ernd_sample_est-ld:\u003c/code\u003e For the LD estimates generated when setting\n\u003ccode\u003eestimate_ld: true\u003c/code\u003e above, randomly sample this proportion of pairwise\nlinkage estimates rather than estimating all (\u003ccode\u003e--rnd_sample\u003c/code\u003e in ngsLD,\nfloat)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003ernd_sample_decay:\u003c/code\u003e The same as \u003ccode\u003ernd_sample_est-ld:\u003c/code\u003e, but used when\nestimating LD decay when setting \u003ccode\u003eld_decay: true\u003c/code\u003e above (float)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003efit_LDdecay_extra:\u003c/code\u003e Additional plotting arguments to pass to\n\u003ccode\u003efit_LDdecay.R\u003c/code\u003e when estimating LD decay (string)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003efit_LDdecay_n_correction:\u003c/code\u003e When estimating LD decay, should the sample\nsize corrected r^2 model be used? 
(\u003ccode\u003etrue\u003c/code\u003e/\u003ccode\u003efalse\u003c/code\u003e, \u003ccode\u003etrue\u003c/code\u003e is the\nequivalent of passing a sample size to \u003ccode\u003efit_LDdecay.R\u003c/code\u003e in ngsLD using\n\u003ccode\u003e--n_ind\u003c/code\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003epruning_min-weight:\u003c/code\u003e The minimum r^2 to assume two positions are in\nlinkage disequilibrium when pruning (float)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003erealSFS:\u003c/code\u003e Settings for realSFS\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003efold:\u003c/code\u003e Whether or not to fold the produced SFS (0 or 1, \u003ca href=\"http://www.popgen.dk/angsd/index.php/SFS_Estimation\" rel=\"nofollow\"\u003edocs\u003c/a\u003e)\n\u003cstrong\u003eNOTE:\u003c/strong\u003e I have not implemented the use of an ancestral reference into\nthis workflow, so this should always be set to 1 until I implement this.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003esfsboot:\u003c/code\u003e Doesn\u0027t work now, but when it does it will produce this many\nbootstrapped SFS per population and population pair (integer)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003efst:\u003c/code\u003e Settings for $F_{ST}$ calculation in ANGSD\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003ewhichFst:\u003c/code\u003e Determines which $F_{ST}$ estimator is used by ANGSD. With 0\nbeing the default Reynolds 1983 and 1 being the Bhatia 2013 estimator.\nThe latter is preferable for small or uneven sample sizes\n(0 or 1, \u003ca href=\"http://www.popgen.dk/angsd/index.php/Fst\" rel=\"nofollow\"\u003edocs\u003c/a\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003ewin_size:\u003c/code\u003e Window size in bp for sliding window analysis (integer)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003ewin_step:\u003c/code\u003e Window step size in bp for sliding window analysis (integer)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003ethetas:\u003c/code\u003e Settings for pi, theta, and Tajima\u0027s D estimation\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003ewin_size:\u003c/code\u003e Window size in bp for sliding window analysis (integer)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003ewin_step:\u003c/code\u003e Window step size in bp for sliding window analysis (integer)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003engsadmix:\u003c/code\u003e Settings for admixture analysis with NGSadmix. This analysis is\nperformed for a set of K groupings, and each K has several replicates\nperformed. Replicates will continue until a set of N highest likelihood\nreplicates converge, or the number of replicates reaches an upper threshold\nset here. 
Defaults for \u003ccode\u003ereps\u003c/code\u003e, \u003ccode\u003eminreps\u003c/code\u003e, \u003ccode\u003ethresh\u003c/code\u003e, and \u003ccode\u003econv\u003c/code\u003e can be left\nas default for most.\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003ekvalues:\u003c/code\u003e A list of values of K to fit the data to (list of integers)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003ereps:\u003c/code\u003e The maximum number of replicates to perform per K (integer)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eminreps:\u003c/code\u003e The minimum number of replicates to perform, even if\nreplicates have converged (integer)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003ethresh:\u003c/code\u003e The convergence threshold - the top replicates must all be\nwithin this value of log-likelihood units to consider the run converged\n(integer)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003econv:\u003c/code\u003e The number of top replicates to include in convergence\nassessment. (integer)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eextra:\u003c/code\u003e Additional arguments to pass to NGSadmix (for instance,\nincreasing \u003ccode\u003e-maxiter\u003c/code\u003e). (string, \u003ca href=\"http://www.popgen.dk/software/index.php/NgsAdmix\" rel=\"nofollow\"\u003edocs\u003c/a\u003e)\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003eibs:\u003c/code\u003e Settings for identity by state calculation with ANGSD\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003e-doIBS:\u003c/code\u003e Whether to use a random (1) or consensus (2) base in IBS\ndistance calculation (\u003ca href=\"http://www.popgen.dk/angsd/index.php/PCA_MDS\" rel=\"nofollow\"\u003edocs\u003c/a\u003e)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n", "data_format": 2, @@ -51141,6 +51141,45 @@ var data = ], "updated_at": 1602644548.0 }, + { + "config_readme": null, + "data_format": 2, + "description": "Multi-omics pipeline for the GLAMR database", + "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 458 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 797 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"shell\" at line 803 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 960 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 964 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1017 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1036 has comments under a value.\n\tPEP8 
recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 1056 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1060 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1112 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1644 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1710 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 2008 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 2055 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 2176 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 2215 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 2659 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 2881 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 2893 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"params\" at line 2917 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"params\" at line 3017 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"shell\" at line 3057 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see 
https://www.python.org/dev/peps/pep-0008/#id30)\n[ERROR] In file \"/tmp/tmp9ubijvwd/Snakefile\": NoParametersError: L3116: In params definition.\n[INFO] In file \"/tmp/tmp9ubijvwd/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", + "full_name": "Geo-omics/GLAMR_omics_pipelines", + "latest_release": null, + "linting": "/tmp/tmp9ubijvwd/Snakefile:6437: SyntaxWarning: invalid escape sequence \u0027\\;\u0027\n/tmp/tmp9ubijvwd/Snakefile:6458: SyntaxWarning: invalid escape sequence \u0027\\;\u0027\nFileNotFoundError in file /tmp/tmp9ubijvwd/Snakefile, line 35:\n[Errno 2] No such file or directory: \u0027data/sample_metadata/sample_lists/metaG_samples\u0027\n File \"/tmp/tmp9ubijvwd/Snakefile\", line 35, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 6, + "subscribers_count": 4, + "topics": [ + "noaa-omics-software" + ], + "updated_at": 1726031221.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "The Loaders for GraphKB. Imports content from external sources via the GraphKB REST API", + "formatting": "[DEBUG] \n\u003cunknown\u003e:7: SyntaxWarning: invalid escape sequence \u0027\\/\u0027\n\u003cunknown\u003e:10: SyntaxWarning: invalid escape sequence \u0027\\ \u0027\n[ERROR] In file \"/tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile\": NoParametersError: L399: In input definition.\n[INFO] In file \"/tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", + "full_name": "bcgsc/pori_graphkb_loader", + "latest_release": "v8.0.2", + "linting": "/tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile:197: SyntaxWarning: invalid escape sequence \u0027\\ \u0027\n cd {DATA_DIR}/cosmic\n/tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile:206: SyntaxWarning: invalid escape sequence \u0027\\ \u0027\n rule load_local:\nLints for snakefile /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile:\n * Absolute path \"/\u003e\u003cAttribute name = \" in line 69:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/releases/[^\" in line 118:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Mixed rules and functions in same snakefile.:\n Small one-liner functions used only once should be defined as lambda\n expressions. Other functions should be collected in a common module, e.g.\n \u0027rules/common.smk\u0027. This makes the workflow steps more readable.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/modularization.html#includes\n * Path composition with \u0027+\u0027 in line 211:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. 
If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 220:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 229:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 239:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 248:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 257:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 266:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 275:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 284:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 292:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 333:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 344:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. 
Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 353:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 363:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 373:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 384:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 394:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 402:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 412:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 424:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 434:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 443:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 459:\n This becomes quickly unreadable. 
Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n\nLints for rule download_ncit (line 48, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_ncit_fda (line 58, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_ensembl (line 65, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_fda_srs (line 74, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_refseq (line 86, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_uberon (line 94, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_do (line 102, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_drugbank (line 113, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable filename from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable filename from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule download_PMC4468049 (line 127, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_PMC4232638 (line 135, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_cgi (line 143, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_local_data (line 152, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_cancerhotspots (line 160, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_cosmic_resistance (line 171, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_cosmic_diseases (line 183, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_cosmic_fusions (line 194, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule all_drugs (line 304, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule all_diseases (line 311, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule download_sources (line 447, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule all_ontologies (line 462, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 6, + "subscribers_count": 6, + "topics": [ + "pori", + "etl" + ], + "updated_at": 1726008134.0 + }, { "config_readme": null, "data_format": 2, @@ -51218,45 +51257,6 @@ var data = "topics": [], "updated_at": 1725434923.0 }, - { - "config_readme": null, - "data_format": 2, - "description": "Multi-omics pipeline for the GLAMR database", - "formatting": "[DEBUG] \n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 458 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 797 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"shell\" at line 803 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 960 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 964 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1017 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" 
at line 1036 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 1056 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1060 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1112 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1644 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 1710 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 2008 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 2055 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 2176 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 2215 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"output\" at line 2659 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 2881 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"input\" at line 2893 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"params\" at line 2917 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"params\" at line 3017 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmp9ubijvwd/Snakefile\": Keyword \"shell\" at line 3057 has comments under a value.\n\tPEP8 recommends block comments appear 
before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[ERROR] In file \"/tmp/tmp9ubijvwd/Snakefile\": NoParametersError: L3116: In params definition.\n[INFO] In file \"/tmp/tmp9ubijvwd/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", - "full_name": "Geo-omics/GLAMR_omics_pipelines", - "latest_release": null, - "linting": "/tmp/tmp9ubijvwd/Snakefile:6437: SyntaxWarning: invalid escape sequence \u0027\\;\u0027\n/tmp/tmp9ubijvwd/Snakefile:6458: SyntaxWarning: invalid escape sequence \u0027\\;\u0027\nFileNotFoundError in file /tmp/tmp9ubijvwd/Snakefile, line 35:\n[Errno 2] No such file or directory: \u0027data/sample_metadata/sample_lists/metaG_samples\u0027\n File \"/tmp/tmp9ubijvwd/Snakefile\", line 35, in \u003cmodule\u003e\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 6, - "subscribers_count": 4, - "topics": [ - "noaa-omics-software" - ], - "updated_at": 1726031221.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "The Loaders for GraphKB. Imports content from external sources via the GraphKB REST API", - "formatting": "[DEBUG] \n\u003cunknown\u003e:7: SyntaxWarning: invalid escape sequence \u0027\\/\u0027\n\u003cunknown\u003e:10: SyntaxWarning: invalid escape sequence \u0027\\ \u0027\n[ERROR] In file \"/tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile\": NoParametersError: L399: In input definition.\n[INFO] In file \"/tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile\": 1 file(s) raised parsing errors \ud83e\udd15\n\nsnakefmt version: 0.10.2", - "full_name": "bcgsc/pori_graphkb_loader", - "latest_release": "v8.0.2", - "linting": "/tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile:197: SyntaxWarning: invalid escape sequence \u0027\\ \u0027\n cd {DATA_DIR}/cosmic\n/tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile:206: SyntaxWarning: invalid escape sequence \u0027\\ \u0027\n rule load_local:\nLints for snakefile /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile:\n * Absolute path \"/\u003e\u003cAttribute name = \" in line 69:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Absolute path \"/releases/[^\" in line 118:\n Do not define absolute paths inside of the workflow, since this renders\n your workflow irreproducible on other machines. Use path relative to the\n working directory instead, or make the path configurable via a config\n file.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/configuration.html#configuration\n * Mixed rules and functions in same snakefile.:\n Small one-liner functions used only once should be defined as lambda\n expressions. Other functions should be collected in a common module, e.g.\n \u0027rules/common.smk\u0027. This makes the workflow steps more readable.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/modularization.html#includes\n * Path composition with \u0027+\u0027 in line 211:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. 
If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 220:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 229:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 239:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 248:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 257:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 266:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 275:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 284:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 292:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 333:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 344:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. 
Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 353:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 363:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 373:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 384:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 394:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 402:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 412:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 424:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 434:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 443:\n This becomes quickly unreadable. Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n * Path composition with \u0027+\u0027 in line 459:\n This becomes quickly unreadable. 
Usually, it is better to endure some\n redundancy against having a more readable workflow. Hence, just repeat\n common prefixes. If path composition is unavoidable, use pathlib or\n (python \u003e= 3.6) string formatting with f\"...\".\n\nLints for rule download_ncit (line 48, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_ncit_fda (line 58, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_ensembl (line 65, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_fda_srs (line 74, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_refseq (line 86, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_uberon (line 94, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_do (line 102, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_drugbank (line 113, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n * Shell command directly uses variable filename from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n * Shell command directly uses variable filename from outside of the rule:\n It is recommended to pass all files as input and output, and non-file\n parameters via the params directive. Otherwise, provenance tracking is\n less accurate.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules\n\nLints for rule download_PMC4468049 (line 127, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_PMC4232638 (line 135, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_cgi (line 143, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_local_data (line 152, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_cancerhotspots (line 160, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_cosmic_resistance (line 171, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_cosmic_diseases (line 183, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule download_cosmic_fusions (line 194, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule all_drugs (line 304, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule all_diseases (line 311, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. 
In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\nLints for rule download_sources (line 447, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n * Specify a conda environment or container for each rule.:\n This way, the used software for each specific step is documented, and the\n workflow can be executed on any machine without prerequisites.\n Also see:\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#integrated-package-management\n https://snakemake.readthedocs.io/en/latest/snakefiles/deployment.html#running-jobs-in-containers\n\nLints for rule all_ontologies (line 462, /tmp/tmpcjqs1_rr/bcgsc-pori_graphkb_loader-4c25b32/Snakefile):\n * No log directive defined:\n Without a log directive, all output will be printed to the terminal. In\n distributed environments, this means that errors are harder to discover.\n In local environments, output of concurrent jobs will be mixed and become\n unreadable.\n Also see:\n https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#log-files\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 6, - "subscribers_count": 6, - "topics": [ - "pori", - "etl" - ], - "updated_at": 1726008134.0 - }, { "config_readme": null, "data_format": 2, @@ -51691,6 +51691,38 @@ var data = "topics": [], "updated_at": 1612269653.0 }, + { + "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eInstallation\u003c/h2\u003e\u003ca id=\"user-content-installation\" class=\"anchor\" aria-label=\"Permalink: Installation\" href=\"#installation\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eStep 1 - Install a Conda-based Python3 distribution\u003c/h3\u003e\u003ca id=\"user-content-step-1---install-a-conda-based-python3-distribution\" class=\"anchor\" aria-label=\"Permalink: Step 1 - Install a Conda-based Python3 distribution\" href=\"#step-1---install-a-conda-based-python3-distribution\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eIf you do not already have Conda installed on your machine/server, install a Conda-based Python3 distribution. We recommend \u003ca href=\"https://github.com/conda-forge/miniforge#mambaforge\"\u003eMambaforge\u003c/a\u003e, which includes Mamba, a fast and robust replacement for the Conda package manager. 
Mamba is preferred over the default Conda solver due to its speed and reliability.\u003c/p\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e Conda (or Mamba) is needed to run SpikeFlow.\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eStep 2 - Install Snakemake\u003c/h3\u003e\u003ca id=\"user-content-step-2---install-snakemake\" class=\"anchor\" aria-label=\"Permalink: Step 2 - Install Snakemake\" href=\"#step-2---install-snakemake\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eTo run this pipeline, you\u0027ll need to install \u003cstrong\u003eSnakemake\u003c/strong\u003e.\u003c/p\u003e\n\u003cp\u003eIf you already have it installed in a conda environment, please check with the command \u003ccode\u003esnakemake --version\u003c/code\u003e and ensure a version \u003cstrong\u003e\u0026gt;= 7.17.0\u003c/strong\u003e.\nOtherwise, please follow the instructions below.\u003c/p\u003e\n\u003cp\u003eOnce you have \u003cem\u003econda\u003c/em\u003e installed, you can create a new environment and install Snakemake with:\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003econda create -c bioconda -c conda-forge -n snakemake snakemake\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003eFor mamba, use the following code:\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003e mamba create -c conda-forge -c bioconda -n snakemake snakemake\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003eOnce the environment is created, activate it with:\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003econda activate snakemake\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003eor\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003emamba activate snakemake\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003eFor further information please check the Snakemake documentation on \u003ca href=\"https://snakemake.readthedocs.io/en/stable/getting_started/installation.html\" rel=\"nofollow\"\u003ehow to install\u003c/a\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eStep 3 - Install Singularity (recommended)\u003c/h3\u003e\u003ca id=\"user-content-step-3---install-singularity-recommended\" class=\"anchor\" aria-label=\"Permalink: Step 3 - Install Singularity (recommended)\" href=\"#step-3---install-singularity-recommended\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eFor a fast workflow installation, it is recommended to use \u003cstrong\u003eSingularity\u003c/strong\u003e (compatible with version 3.9.5). 
This bypasses the need for \u003cem\u003eConda\u003c/em\u003e to set up required environments, as these are already present within the container that will be pulled from \u003ca href=\"https://hub.docker.com/r/davidebrex/spikeflow\" rel=\"nofollow\"\u003edockerhub\u003c/a\u003e with the use of the \u003ccode\u003e--software-deployment-method conda apptainer\u003c/code\u003e flag.\u003c/p\u003e\n\u003cp\u003eTo install singularity check \u003ca href=\"https://docs.sylabs.io/guides/3.0/user-guide/installation.html\" rel=\"nofollow\"\u003eits website\u003c/a\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eStep 4 - Download SpikeFlow\u003c/h3\u003e\u003ca id=\"user-content-step-4---download-spikeflow\" class=\"anchor\" aria-label=\"Permalink: Step 4 - Download SpikeFlow\" href=\"#step-4---download-spikeflow\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eTo obtain SpikeFlow, you have two options:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003eDownload the source code as zip file from the latest \u003ca href=\"https://github.com/DavideBrex/SpikeFlow/releases/latest\"\u003eversion\u003c/a\u003e. For example: \u003ccode\u003ewget https://github.com/DavideBrex/SpikeFlow/archive/refs/tags/v1.2.0.zip\u003c/code\u003e will download a zip file. Unzip it and move to the Spikeflow-1.2.0 folder.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eClone the repository on your local machine. See \u003ca href=\"https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository\"\u003ehere\u003c/a\u003e the instructions.\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eStep 5 - Test the workflow\u003c/h3\u003e\u003ca id=\"user-content-step-5---test-the-workflow\" class=\"anchor\" aria-label=\"Permalink: Step 5 - Test the workflow\" href=\"#step-5---test-the-workflow\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eOnce you obtained the latest version of SpikeFlow, the \u003ccode\u003econfig.yaml\u003c/code\u003e and the \u003ccode\u003esamples_sheet.csv\u003c/code\u003e files are already set to run an installation test.\nYou can open them to have an idea about their structure.\nAll the files needed for the test are in the \u003ccode\u003e.test\u003c/code\u003e folder (on ubuntu, type \u003cem\u003ectrl + h\u003c/em\u003e to see hidden files and folders).\u003c/p\u003e\n\u003cp\u003e\u003cstrong\u003eTo test whether SpikeFlow is working properly, jump directly to the \u003ca href=\"#run\"\u003eRun the workflow\u003c/a\u003e section of the documentation.\u003c/strong\u003e\u003c/p\u003e\n\u003cp\u003eThe usage of this workflow is also described in the \u003ca href=\"https://snakemake.github.io/snakemake-workflow-catalog/?usage=DavideBrex%2FSpikeFlow\" rel=\"nofollow\"\u003eSnakemake Workflow Catalog\u003c/a\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eConfiguration\u003c/h2\u003e\u003ca id=\"user-content-configuration\" class=\"anchor\" aria-label=\"Permalink: Configuration\" href=\"#configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 
class=\"heading-element\"\u003e1. \u003cstrong\u003eSample Sheet Input Requirements\u003c/strong\u003e\n\u003c/h3\u003e\u003ca id=\"user-content-1-sample-sheet-input-requirements\" class=\"anchor\" aria-label=\"Permalink: 1. Sample Sheet Input Requirements\" href=\"#1-sample-sheet-input-requirements\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003ePrior to executing the pipeline, you need to prepare a sample sheet containing detailed information about the samples to analyse. You can find an example of this file under \u003ccode\u003econfig/samples_sheet.csv\u003c/code\u003e.\nThe required format is a comma-separated values (CSV) file, consisting of eight columns and including a header row.\nFor each sample (row), you need to specify:\u003c/p\u003e\n\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003eColumn Name\u003c/th\u003e\n\u003cth\u003eDescription\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003esample\u003c/td\u003e\n\u003ctd\u003eUnique sample name\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003ereplicate\u003c/td\u003e\n\u003ctd\u003eInteger indicating the number of replicate (if no replicate simply add 1)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eantibody\u003c/td\u003e\n\u003ctd\u003eAntibody used for the experiment (leave empty for Input samples)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003econtrol\u003c/td\u003e\n\u003ctd\u003eUnique sample name of the control (it has to be specified also in the sample column, but in another row)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003econtrol_replicate\u003c/td\u003e\n\u003ctd\u003eInteger indicating the number of replicate for the control sample (if no replicate simply add 1)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003epeak_type\u003c/td\u003e\n\u003ctd\u003eCan only be equal to: narrow, broad, very-broad. It indicates the type of peak calling to perform\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003efastq_1\u003c/td\u003e\n\u003ctd\u003ePath to the fastq file of the sample (if paired-end, here goes the forward mate, i.e. R1)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003efastq_2\u003c/td\u003e\n\u003ctd\u003eONLY for paired-end, otherwise leave empty. Path to the fastq file of the reverse mate (i.e. 
R2)\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\n\u003cp\u003eFor the input samples, leave empty the values of the all the columns except for sample, replicate and fastq path(s).\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eExample 1 (single end)\u003c/h4\u003e\u003ca id=\"user-content-example-1-single-end\" class=\"anchor\" aria-label=\"Permalink: Example 1 (single end)\" href=\"#example-1-single-end\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003esample\u003c/th\u003e\n\u003cth\u003ereplicate\u003c/th\u003e\n\u003cth\u003eantibody\u003c/th\u003e\n\u003cth\u003econtrol\u003c/th\u003e\n\u003cth\u003econtrol_replicate\u003c/th\u003e\n\u003cth\u003epeak_type\u003c/th\u003e\n\u003cth\u003efastq_1\u003c/th\u003e\n\u003cth\u003efastq_2\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K4me3_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003eH3K4me3\u003c/td\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003enarrow\u003c/td\u003e\n\u003ctd\u003efastq/H3K4me3_untreated-1_L1.fastq.gz\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K4me3_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003eH3K4me3\u003c/td\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003enarrow\u003c/td\u003e\n\u003ctd\u003efastq/H3K4me3_untreated-1_L2.fastq.gz\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003efastq/Input-untreated-1_fastq.gz\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e If your sample has \u003cstrong\u003emultiple lanes\u003c/strong\u003e, you can simple add a new row with the same values in all the columns except for fastq_1 (and fastq_2 if PE). 
In the table above, H3K4me3_untreated has two lanes\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eExample 2 (paired end)\u003c/h4\u003e\u003ca id=\"user-content-example-2-paired-end\" class=\"anchor\" aria-label=\"Permalink: Example 2 (paired end)\" href=\"#example-2-paired-end\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003esample\u003c/th\u003e\n\u003cth\u003ereplicate\u003c/th\u003e\n\u003cth\u003eantibody\u003c/th\u003e\n\u003cth\u003econtrol\u003c/th\u003e\n\u003cth\u003econtrol_replicate\u003c/th\u003e\n\u003cth\u003epeak_type\u003c/th\u003e\n\u003cth\u003efastq_1\u003c/th\u003e\n\u003cth\u003efastq_2\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K9me2_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003eH3K9me2\u003c/td\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003every-broad\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_untreated-1_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_untreated-1_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K9me2_untreated\u003c/td\u003e\n\u003ctd\u003e2\u003c/td\u003e\n\u003ctd\u003eH3K9me2\u003c/td\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003every-broad\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_untreated-2_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_untreated-2_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K9me2_EGF\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003eH3K9me2\u003c/td\u003e\n\u003ctd\u003eInput_EGF\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003every-broad\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_EGF-1_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_EGF-1_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K9me2_EGF\u003c/td\u003e\n\u003ctd\u003e2\u003c/td\u003e\n\u003ctd\u003eH3K9me2\u003c/td\u003e\n\u003ctd\u003eInput_EGF\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003every-broad\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_EGF-2_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_EGF-2_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003efastq/Input-untreated-1_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/Input-untreated-1_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eInput_EGF\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003efastq/Input-EGF-1_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/Input-EGF-1_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e In this case, we have two replicates per condition (untreated and EGF) and the samples are paired-end. 
However, \u003cstrong\u003emixed situations (some single and some paired-end samples) are also accepted by the pipeline.\u003c/strong\u003e\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003e2. \u003cstrong\u003eConfig file\u003c/strong\u003e\n\u003c/h3\u003e\u003ca id=\"user-content-2-config-file\" class=\"anchor\" aria-label=\"Permalink: 2. Config file\" href=\"#2-config-file\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eThe last step before running the workflow is to adjust the parameters in the config file (\u003ccode\u003econfig/config.yaml\u003c/code\u003e). The file is written in YAML (Yet Another Markup Language), which is a human-readable data serialization format. It contains key-value pairs that can be nested to multiple leves.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003e\u003cem\u003eReference and exogenous (spike-in) genomes\u003c/em\u003e\u003c/h4\u003e\u003ca id=\"user-content-reference-and-exogenous-spike-in-genomes\" class=\"anchor\" aria-label=\"Permalink: Reference and exogenous (spike-in) genomes\" href=\"#reference-and-exogenous-spike-in-genomes\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eTo execute the pipeline, it\u0027s essential to specify both \u003cem\u003eendogenous\u003c/em\u003e and \u003cem\u003eexogenous\u003c/em\u003e species in the assembly field; for example, use Drosophila (dm16) as the exogenous and Human (hg38) as the endogenous species. You can find the the genome assembly on the \u003ca href=\"https://genome-euro.ucsc.edu/cgi-bin/hgGateway\" rel=\"nofollow\"\u003eUCSC Genome Browser\u003c/a\u003e.\u003c/p\u003e\n\u003cp\u003eIf a bowtie2 genome index is already available for the merged genomes (e.g. hg38 + dm16), you should input the path (ending with the index files prefix) in the \u0027resources\u0027 section of the pipeline configuration. This setup ensures proper alignment and processing of your genomic data. \u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e The index must be created with bowtie2 v2.5.3.\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-yaml\"\u003e\u003cpre\u003e\u003cspan class=\"pl-ent\"\u003eresources\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eref\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eindex\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e/path/to/hg38_dm16_merged.bowtie2.index/indexFilesPrefix\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003e ucsc genome name (e.g. 
hg38, mm10, etc)\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eassembly\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003ehg38\u003c/span\u003e\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003eblacklist regions \u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eblacklist\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e.test/data/hg38-blacklist.v2.bed\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n\n \u003cspan class=\"pl-ent\"\u003eref_spike\u003c/span\u003e:\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003e ucsc genome name (e.g. dm6, mm10, etc)\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003espike_assembly\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003edm6\u003c/span\u003e\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eIf you don\u0027t have the bowtie2 index readily available, the pipeline will generate it for you. To do so, leave empty the index field in the resources section (see below):\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-yaml\"\u003e\u003cpre\u003e\u003cspan class=\"pl-ent\"\u003eresources\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eref\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eindex\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003e ucsc genome name (e.g. hg38, mm10, etc)\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eassembly\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003ehg38\u003c/span\u003e\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003eblacklist regions \u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eblacklist\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e.test/data/hg38-blacklist.v2.bed\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n\n \u003cspan class=\"pl-ent\"\u003eref_spike\u003c/span\u003e:\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003e ucsc genome name (e.g. hg38, mm10, etc)\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003espike_assembly\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003edm6\u003c/span\u003e\u003c/pre\u003e\u003c/div\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e For the endogenous genome, it\u0027s important to also include the path to blacklisted regions. These regions, often associated with sequencing artifacts or other anomalies, can be downloaded from the Boyle Lab\u0027s Blacklist repository on GitHub. 
You can access these blacklisted region files \u003ca href=\"https://github.com/Boyle-Lab/Blacklist/tree/master/lists\"\u003ehere\u003c/a\u003e\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003e\u003cem\u003eNormalization\u003c/em\u003e\u003c/h4\u003e\u003ca id=\"user-content-normalization\" class=\"anchor\" aria-label=\"Permalink: Normalization\" href=\"#normalization\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eIn this field you can choose the type of normalization to perform on the samples. The available options are:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eRAW\u003c/strong\u003e: This is a RPM normalization, i.e. it normalizes the read counts to the total number of reads in a sample, measured per million reads. This method is straightforward but does not account for spike-in.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eOrlando\u003c/strong\u003e: Spike-in normalization as described in \u003ca href=\"https://pubmed.ncbi.nlm.nih.gov/25437568/\" rel=\"nofollow\"\u003eOrlando et al 2014\u003c/a\u003e. Also reffered as Reference-adjusted Reads Per Million (RRPM). It does not incorporate input data in the normalization process.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eRX-Input\u003c/strong\u003e (default): RX-Input is a modified version of the Orlando normalization that accounts for the total number of reads mapped to the spike-in in both the ChIP and input samples. This approach allows for more accurate normalization by accounting for variations in both immunoprecipitation efficiency and background noise (as represented by the input). See \u003ca href=\"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6561741/#bib42\" rel=\"nofollow\"\u003eFursova et al 2019\u003c/a\u003e for further details.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eDownsampling\u003c/strong\u003e: The sample with the minimum umber of spike-in reads is used as the reference. Sample reads from all other samples are downsampled to the same level as this reference sample. This approach is applicable to datasets where the numbers of reads are similar. See \u003ca href=\"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8313745/\" rel=\"nofollow\"\u003eWu et al. 2021\u003c/a\u003e for further details.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eMedian Normalization\u003c/strong\u003e: Normalize to the median. All samples can be normalized to the median value of spike-in reads. This method is not suited for integrating datasets from different sources. See \u003ca href=\"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8313745/\" rel=\"nofollow\"\u003eWu et al. 
2021\u003c/a\u003e for further details.\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eExample:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-yaml\"\u003e\u003cpre\u003e\u003cspan class=\"pl-ent\"\u003enormalization_type\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eOrlando\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\u003c/pre\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003e\u003cem\u003eDifferential Peak analysis\u003c/em\u003e\u003c/h4\u003e\u003ca id=\"user-content-differential-peak-analysis\" class=\"anchor\" aria-label=\"Permalink: Differential Peak analysis\" href=\"#differential-peak-analysis\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eSpikeFlow allows you to perform differential peaks analysis. In this case, the grouping variable for the samples will be extracted from the sample name in the \u003cem\u003esample_sheet.csv\u003c/em\u003e (after the last \u0027_\u0027). Also, if \u003ccode\u003eperform_diff_analysis: true\u003c/code\u003e, you will need to specify the contrasts (per antibody), meaning the groups that you want to compare. Please also specify the log2 fold change (log2FCcutoff) and adjusted p-value (padjust) thresholds for differential analysis.\u003c/p\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e Ensure that the group names for the differential peaks analysis and the contrast names do not contain any additional underscores (\u0027_\u0027), and that the antibody names do not contain any underscores (\u0027_\u0027).\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cp\u003eWhen differential peak analysis is enabled, SpikeFlow will create a consensus peak set per antibody and count reads on those peaks. The default behavior to build the consensus regions is to use all the peaks from all the samples (i.e., minNumSamples: 0). However, you can change this to specify the minimum number of samples a peak should be present in to be kept for the consensus peak set (minNumSamples).\u003c/p\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e If \u003ccode\u003euseSpikeinCalledPeaks: true\u003c/code\u003e, spike-normalized peak calling will be executed in addition to the standard peak calling. 
The resulting regions from the spike-normalized peak calling will be used for consensus peak set generation and differential analysis.\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cdiv class=\"highlight highlight-source-yaml\"\u003e\u003cpre\u003e\u003cspan class=\"pl-ent\"\u003ediffPeakAnalysis\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eperform_diff_analysis\u003c/span\u003e: \u003cspan class=\"pl-c1\"\u003etrue\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003econtrasts\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eH3K4me3\u003c/span\u003e:\n - \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eEGF_vs_untreated\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003epadjust\u003c/span\u003e: \u003cspan class=\"pl-c1\"\u003e0.01\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003elog2FCcutoff\u003c/span\u003e: \u003cspan class=\"pl-c1\"\u003e1.5\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eminNumSamples\u003c/span\u003e: \u003cspan class=\"pl-c1\"\u003e1\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003euseSpikeinCalledPeaks\u003c/span\u003e: \u003cspan class=\"pl-c1\"\u003efalse\u003c/span\u003e\u003c/pre\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003e\u003cem\u003eRequired options\u003c/em\u003e\u003c/h4\u003e\u003ca id=\"user-content-required-options\" class=\"anchor\" aria-label=\"Permalink: Required options\" href=\"#required-options\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eWhen configuring your pipeline based on the chosen reference/endogenous genome (like mm10 or hg38), two essential options need to be set:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eeffective genome length\u003c/strong\u003e: This is required by deeptools to generate the bigWig files. The value of this parameter is used by the program to adjust the mappable portion of the genome, ensuring that the read densities represented in the BigWig files accurately reflect the underlying biological reality. You can find the possible values for this parameter in the deeptools \u003ca href=\"https://deeptools.readthedocs.io/en/latest/content/feature/effectiveGenomeSize.html\" rel=\"nofollow\"\u003edocumentation\u003c/a\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003echrom sizes\u003c/strong\u003e: To achieve accurate peak calling, it\u0027s important to use the correct chromosome sizes file. The supported genomes\u0027 chromosome sizes are available under \u003ccode\u003eresources/chrom_size\u003c/code\u003e. 
\u003cstrong\u003eMake sure to select the file that corresponds to your chosen genome\u003c/strong\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003e\u003cem\u003eOther (optional) parameters\u003c/em\u003e\u003c/h4\u003e\u003ca id=\"user-content-other-optional-parameters\" class=\"anchor\" aria-label=\"Permalink: Other (optional) parameters\" href=\"#other-optional-parameters\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003eTo direct Snakemake to save all outputs in a specific directory, add the desired path in the config file: \u003ccode\u003eoutput_path: \"path/to/directory\"\u003c/code\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eWhile splitting the BAM file into two separate ones (one endogenous and one spike-in), reads with a mapping quality below 8 are discarded. You can adjust this behavior using the bowtie2 \u003ccode\u003emap_quality\u003c/code\u003e field. If no filtering is needed, set this value to 0; otherwise, adjust it from 0 to 30 as needed. For more information on Bowtie2 MAPQ scores, see \u003ca href=\"http://biofinysics.blogspot.com/2014/05/how-does-bowtie2-assign-mapq-scores.html\" rel=\"nofollow\"\u003ehere\u003c/a\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eBroad Peak Calling:\u003c/strong\u003e For samples requiring broad peak calling, adjust the effective genome fraction as per the guidelines on this \u003ca href=\"https://github.com/biocore-ntnu/epic2/blob/master/epic2/effective_sizes/hg38_50.txt\"\u003epage\u003c/a\u003e. The \u003cem\u003e\u0027effective genome size\u0027\u003c/em\u003e mentioned on the GitHub page depends on the read length of your samples.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eVery Broad Peak Callling:\u003c/strong\u003e If you have samples that will undergo very-broad peak calling, please check the log files produced by EDD. This because the tool might fail if it can not accurately estimate the parameters for the peak calling. In this case, you can tweak the parameters in the EDD config file, which is in the config directory (\u003ccode\u003econfig/edd_parameters.conf\u003c/code\u003e). For more information about EDD parameters tuning see the \u003ca href=\"https://github.com/CollasLab/edd\"\u003edocumentation\u003c/a\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eTrimming Option:\u003c/strong\u003e Trimming can be skipped by setting the respective flag to false.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eP-Value Adjustment for Peak Calling:\u003c/strong\u003e Modify the q-values for peak calling in the config file. 
This applies to different peak calling methods: narrow (macs2), broad (epic2), or very-broad (edd).\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003ePeak Annotation Threshold:\u003c/strong\u003e The default setting annotates a peak within \u00b12500 bp around the promoter region.\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eRun the workflow\u003c/h2\u003e\u003ca id=\"user-content-run-the-workflow\" class=\"anchor\" aria-label=\"Permalink: Run the workflow\" href=\"#run-the-workflow\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eTo execute the pipeline, make sure to be in the main \u003cstrong\u003eSnakemake working directory\u003c/strong\u003e, which includes subfolders like \u0027workflow\u0027, \u0027resources\u0027, and \u0027config\u0027.\u003c/p\u003e\n\u003cp\u003eThe workflow can be operated in two ways: using Conda alone, or a combination of Conda and Singularity (\u003cstrong\u003erecommended\u003c/strong\u003e).\nAfter obtaining a copy of the workflow on your machine, you can verify its proper functioning by executing one of the two commands below.\nThe \u003ccode\u003econfig\u003c/code\u003e and \u003ccode\u003esample_sheet\u003c/code\u003e files come pre-configured for a test run.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eConda and Singularity (recommended)\u003c/h4\u003e\u003ca id=\"user-content-conda-and-singularity-recommended\" class=\"anchor\" aria-label=\"Permalink: Conda and Singularity (recommended)\" href=\"#conda-and-singularity-recommended\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eSnakemake versions \u0026gt;= 8:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-shell\"\u003e\u003cpre\u003esnakemake --cores 10 --software-deployment-method conda apptainer\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eSnakemake versions \u0026gt;= 7.17 and \u0026lt; 8:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-shell\"\u003e\u003cpre\u003esnakemake -j 10 --use-conda --use-singularity\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eFirst, the singularity container will be pulled from DockerHub and then the workflow will be executed. 
To install sigularity, see the \u003ca href=\"#install\"\u003einstallation\u003c/a\u003e section.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eConda only\u003c/h4\u003e\u003ca id=\"user-content-conda-only\" class=\"anchor\" aria-label=\"Permalink: Conda only\" href=\"#conda-only\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eSnakemake versions \u0026gt;= 8:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-shell\"\u003e\u003cpre\u003esnakemake --cores 10 --software-deployment-method conda\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eSnakemake versions \u0026gt;= 7.17 and \u0026lt; 8:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-shell\"\u003e\u003cpre\u003esnakemake -j 10 --use-conda\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eThis will install all the required conda envs (it might take a while, just for the first execution).\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eSnakemake flags\u003c/h4\u003e\u003ca id=\"user-content-snakemake-flags\" class=\"anchor\" aria-label=\"Permalink: Snakemake flags\" href=\"#snakemake-flags\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003e--cores\u003c/code\u003e or \u003ccode\u003e-j\u003c/code\u003e: indicates the number of cores. Adjust this number (here set to 10) based on your machine configuration\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003e-n\u003c/code\u003e: add this flag to the command line for a \"dry run,\" which allows Snakemake to display the rules that it would execute, without actually running them.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003e--singularity-args \"-B /shares,/home -e\"\u003c/code\u003e: add this flag only with \u003ccode\u003e--software-deployment-method conda apptainer\u003c/code\u003e or \u003ccode\u003e--use-conda --use-singularity\u003c/code\u003e. It will allow singularity to mount the specified disks (/shares and /home), in this case should be where you have your working dir and files.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eTo execute the pipeline on a HPC cluster, please follow \u003ca href=\"https://snakemake.readthedocs.io/en/stable/tutorial/additional_features.html#cluster-execution\" rel=\"nofollow\"\u003ethese guidelines\u003c/a\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eOutput files\u003c/h2\u003e\u003ca id=\"user-content-output-files\" class=\"anchor\" aria-label=\"Permalink: Output files\" href=\"#output-files\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eAll the outputs of the workflow are stored in the \u003ccode\u003eresults\u003c/code\u003e folder. 
Additionally, in case of any errors during the workflow execution, the log files are stored within the \u003ccode\u003eresults/logs\u003c/code\u003e directory.\u003c/p\u003e\n\u003cp\u003eThe main outputs of the workflow are:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eMultiQC Report\u003c/strong\u003e\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eIf the differential peaks analysis was activated, you will find scatter plots/volcano and PCA plots in the report.\u003c/li\u003e\n\u003cli\u003ePeak Calling Data: Displays the number of peaks called per sample for each method (MACS2, EPIC2, EDD).\u003c/li\u003e\n\u003cli\u003ePeaks annotation\u003c/li\u003e\n\u003cli\u003eReads Table: Per sample reference and spike-in calculated with the normalisation set by the user.\u003c/li\u003e\n\u003cli\u003eBasic QC with FastQC: Evaluates basic quality metrics of the sequencing data.\u003c/li\u003e\n\u003cli\u003ePhantom Peak Qual Tools: Provides NSC and RSC values, indicating the quality and reproducibility of ChIP samples. NSC measures signal-to-noise ratio, while RSC assesses enrichment strength.\u003c/li\u003e\n\u003cli\u003eFingerprint Plots: Visual representation of the sample quality, showing how reads are distributed across the genome.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003e\u003ccode\u003eresults/QC/multiqc/multiqc_report.html\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003ePeaks Differential Analysis\u003c/strong\u003e\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eIn this folder, you will find the differential peak regions and the volcano/scatter/pca plots for each antibody and contrast.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003e\u003ccode\u003eresults/differentialAnalysis\u003c/code\u003e\u003c/p\u003e\n\u003cp\u003eIf spike-in normalised peak calling was activated, you will find the results of the differential analysis in:\u003c/p\u003e\n\u003cp\u003e\u003ccode\u003eresults/differentialAnalysis/NormalisedPeaks\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eNormalized BigWig Files\u003c/strong\u003e:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eEssential for visualizing read distribution and creating detailed heatmaps.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003e\u003ccode\u003eresults/bigWigs/\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003ePeak Files and Annotation\u003c/strong\u003e:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003eProvides called peaks for each peak calling method. 
Consensus regions bed files are in \u003ccode\u003e/results/peakCalling/mergedPeaks\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003ePeak annotation using ChIPseeker, resulting in two files for promoter and distal peaks for each sample \u003ccode\u003e/results/peakCalling/peakAnnot\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eStandard peak calling: \u003ccode\u003e/results/peakCalling/\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eSpike-in normalised peak calling: \u003ccode\u003e/results/peakCallingNorm/\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eTroubleshooting\u003c/h2\u003e\u003ca id=\"user-content-troubleshooting\" class=\"anchor\" aria-label=\"Permalink: Troubleshooting\" href=\"#troubleshooting\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003col\u003e\n\u003cli\u003e\n\u003cp\u003eWhen you run SpikeFlow with Singularity ( \u003ccode\u003e--software-deployment-method conda apptainer\u003c/code\u003e), you might get an error if you set the \u003ccode\u003e-n\u003c/code\u003e flag. This happens ONLY at the first execution of the workflow. Remove the flag and it should work.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eThe \u003ccode\u003e--software-deployment-method conda apptainer\u003c/code\u003e option temporary requires about 7 GB of disk space to build the image from Docker Hub. If your \u003ccode\u003e/tmp\u003c/code\u003e directory is full, you\u0027ll encounter a \u003ccode\u003eNo space left on device\u003c/code\u003e error. To avoid this, change the Singularity temp directory to a different disk by setting the \u003ccode\u003eSINGULARITY_TMPDIR\u003c/code\u003e environment variable. More details are available in the \u003ca href=\"https://docs.sylabs.io/guides/latest/user-guide/build_env.html#temporary-folders\" rel=\"nofollow\"\u003eSingularity guide on temporary folders\u003c/a\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eIn case of errors during the execution, please make sure to check the log files of the failing snakemake rule in the log folders\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eFor the R scripts execution in SpikeFlow, the env variable R_LIBS_SITE has to be empty otherwise snakemake will look in that folder for R libraries. 
To avoid this you can use \u003ccode\u003eunset R_LIBS_SITE\u003c/code\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ol\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eCitation\u003c/h2\u003e\u003ca id=\"user-content-citation\" class=\"anchor\" aria-label=\"Permalink: Citation\" href=\"#citation\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eIf you use this workflow in a paper, don\u0027t forget to give credits to the authors by citing the URL of this (original) repository:\n\u003ca href=\"https://github.com/DavideBrex/SpikeFlow\"\u003ehttps://github.com/DavideBrex/SpikeFlow\u003c/a\u003e\u003c/p\u003e\n\u003cp\u003e\u003cstrong\u003eAuthor\u003c/strong\u003e:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eDavide Bressan (\u003ca href=\"https://twitter.com/BrexDavide\" rel=\"nofollow\"\u003e@DavideBrex\u003c/a\u003e)\u003c/li\u003e\n\u003c/ul\u003e\n", + "data_format": 2, + "description": "Pipeline to analyse ChIP-Rx data, i.e ChIP-Seq with reference exogenous genome spike-in normalization", + "formatting": null, + "full_name": "DavideBrex/SpikeFlow", + "latest_release": "v1.3.0", + "linting": null, + "mandatory_flags": { + "desc": null, + "flags": null + }, + "report": true, + "software_stack_deployment": { + "conda": true, + "singularity": true, + "singularity+conda": true + }, + "standardized": true, + "stargazers_count": 7, + "subscribers_count": 1, + "topics": [ + "bioinformatics", + "bioinformatics-pipeline", + "chip-seq", + "normalisation", + "snakemake", + "snakemake-workflows", + "spike-ins" + ], + "updated_at": 1725893321.0 + }, { "config_readme": null, "data_format": 2, @@ -51742,38 +51774,6 @@ var data = "topics": [], "updated_at": 1725611567.0 }, - { - "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eInstallation\u003c/h2\u003e\u003ca id=\"user-content-installation\" class=\"anchor\" aria-label=\"Permalink: Installation\" href=\"#installation\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eStep 1 - Install a Conda-based Python3 distribution\u003c/h3\u003e\u003ca id=\"user-content-step-1---install-a-conda-based-python3-distribution\" class=\"anchor\" aria-label=\"Permalink: Step 1 - Install a Conda-based Python3 distribution\" href=\"#step-1---install-a-conda-based-python3-distribution\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eIf you do not already have Conda installed on your machine/server, install a Conda-based Python3 distribution. We recommend \u003ca href=\"https://github.com/conda-forge/miniforge#mambaforge\"\u003eMambaforge\u003c/a\u003e, which includes Mamba, a fast and robust replacement for the Conda package manager. 
Mamba is preferred over the default Conda solver due to its speed and reliability.\u003c/p\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e Conda (or Mamba) is needed to run SpikeFlow.\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eStep 2 - Install Snakemake\u003c/h3\u003e\u003ca id=\"user-content-step-2---install-snakemake\" class=\"anchor\" aria-label=\"Permalink: Step 2 - Install Snakemake\" href=\"#step-2---install-snakemake\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eTo run this pipeline, you\u0027ll need to install \u003cstrong\u003eSnakemake\u003c/strong\u003e.\u003c/p\u003e\n\u003cp\u003eIf you already have it installed in a conda environment, please check with the command \u003ccode\u003esnakemake --version\u003c/code\u003e and ensure a version \u003cstrong\u003e\u0026gt;= 7.17.0\u003c/strong\u003e.\nOtherwise, please follow the instructions below.\u003c/p\u003e\n\u003cp\u003eOnce you have \u003cem\u003econda\u003c/em\u003e installed, you can create a new environment and install Snakemake with:\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003econda create -c bioconda -c conda-forge -n snakemake snakemake\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003eFor mamba, use the following code:\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003e mamba create -c conda-forge -c bioconda -n snakemake snakemake\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003eOnce the environment is created, activate it with:\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003econda activate snakemake\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003eor\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003emamba activate snakemake\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003eFor further information please check the Snakemake documentation on \u003ca href=\"https://snakemake.readthedocs.io/en/stable/getting_started/installation.html\" rel=\"nofollow\"\u003ehow to install\u003c/a\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eStep 3 - Install Singularity (recommended)\u003c/h3\u003e\u003ca id=\"user-content-step-3---install-singularity-recommended\" class=\"anchor\" aria-label=\"Permalink: Step 3 - Install Singularity (recommended)\" href=\"#step-3---install-singularity-recommended\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eFor a fast workflow installation, it is recommended to use \u003cstrong\u003eSingularity\u003c/strong\u003e (compatible with version 3.9.5). 
This bypasses the need for \u003cem\u003eConda\u003c/em\u003e to set up required environments, as these are already present within the container that will be pulled from \u003ca href=\"https://hub.docker.com/r/davidebrex/spikeflow\" rel=\"nofollow\"\u003edockerhub\u003c/a\u003e with the use of the \u003ccode\u003e--software-deployment-method conda apptainer\u003c/code\u003e flag.\u003c/p\u003e\n\u003cp\u003eTo install singularity check \u003ca href=\"https://docs.sylabs.io/guides/3.0/user-guide/installation.html\" rel=\"nofollow\"\u003eits website\u003c/a\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eStep 4 - Download SpikeFlow\u003c/h3\u003e\u003ca id=\"user-content-step-4---download-spikeflow\" class=\"anchor\" aria-label=\"Permalink: Step 4 - Download SpikeFlow\" href=\"#step-4---download-spikeflow\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eTo obtain SpikeFlow, you have two options:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003eDownload the source code as zip file from the latest \u003ca href=\"https://github.com/DavideBrex/SpikeFlow/releases/latest\"\u003eversion\u003c/a\u003e. For example: \u003ccode\u003ewget https://github.com/DavideBrex/SpikeFlow/archive/refs/tags/v1.2.0.zip\u003c/code\u003e will download a zip file. Unzip it and move to the Spikeflow-1.2.0 folder.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eClone the repository on your local machine. See \u003ca href=\"https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository\"\u003ehere\u003c/a\u003e the instructions.\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003eStep 5 - Test the workflow\u003c/h3\u003e\u003ca id=\"user-content-step-5---test-the-workflow\" class=\"anchor\" aria-label=\"Permalink: Step 5 - Test the workflow\" href=\"#step-5---test-the-workflow\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eOnce you obtained the latest version of SpikeFlow, the \u003ccode\u003econfig.yaml\u003c/code\u003e and the \u003ccode\u003esamples_sheet.csv\u003c/code\u003e files are already set to run an installation test.\nYou can open them to have an idea about their structure.\nAll the files needed for the test are in the \u003ccode\u003e.test\u003c/code\u003e folder (on ubuntu, type \u003cem\u003ectrl + h\u003c/em\u003e to see hidden files and folders).\u003c/p\u003e\n\u003cp\u003e\u003cstrong\u003eTo test whether SpikeFlow is working properly, jump directly to the \u003ca href=\"#run\"\u003eRun the workflow\u003c/a\u003e section of the documentation.\u003c/strong\u003e\u003c/p\u003e\n\u003cp\u003eThe usage of this workflow is also described in the \u003ca href=\"https://snakemake.github.io/snakemake-workflow-catalog/?usage=DavideBrex%2FSpikeFlow\" rel=\"nofollow\"\u003eSnakemake Workflow Catalog\u003c/a\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eConfiguration\u003c/h2\u003e\u003ca id=\"user-content-configuration\" class=\"anchor\" aria-label=\"Permalink: Configuration\" href=\"#configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 
class=\"heading-element\"\u003e1. \u003cstrong\u003eSample Sheet Input Requirements\u003c/strong\u003e\n\u003c/h3\u003e\u003ca id=\"user-content-1-sample-sheet-input-requirements\" class=\"anchor\" aria-label=\"Permalink: 1. Sample Sheet Input Requirements\" href=\"#1-sample-sheet-input-requirements\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003ePrior to executing the pipeline, you need to prepare a sample sheet containing detailed information about the samples to analyse. You can find an example of this file under \u003ccode\u003econfig/samples_sheet.csv\u003c/code\u003e.\nThe required format is a comma-separated values (CSV) file, consisting of eight columns and including a header row.\nFor each sample (row), you need to specify:\u003c/p\u003e\n\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003eColumn Name\u003c/th\u003e\n\u003cth\u003eDescription\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003esample\u003c/td\u003e\n\u003ctd\u003eUnique sample name\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003ereplicate\u003c/td\u003e\n\u003ctd\u003eInteger indicating the number of replicate (if no replicate simply add 1)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eantibody\u003c/td\u003e\n\u003ctd\u003eAntibody used for the experiment (leave empty for Input samples)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003econtrol\u003c/td\u003e\n\u003ctd\u003eUnique sample name of the control (it has to be specified also in the sample column, but in another row)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003econtrol_replicate\u003c/td\u003e\n\u003ctd\u003eInteger indicating the number of replicate for the control sample (if no replicate simply add 1)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003epeak_type\u003c/td\u003e\n\u003ctd\u003eCan only be equal to: narrow, broad, very-broad. It indicates the type of peak calling to perform\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003efastq_1\u003c/td\u003e\n\u003ctd\u003ePath to the fastq file of the sample (if paired-end, here goes the forward mate, i.e. R1)\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003efastq_2\u003c/td\u003e\n\u003ctd\u003eONLY for paired-end, otherwise leave empty. Path to the fastq file of the reverse mate (i.e. 
R2)\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\n\u003cp\u003eFor the input samples, leave empty the values of the all the columns except for sample, replicate and fastq path(s).\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eExample 1 (single end)\u003c/h4\u003e\u003ca id=\"user-content-example-1-single-end\" class=\"anchor\" aria-label=\"Permalink: Example 1 (single end)\" href=\"#example-1-single-end\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003esample\u003c/th\u003e\n\u003cth\u003ereplicate\u003c/th\u003e\n\u003cth\u003eantibody\u003c/th\u003e\n\u003cth\u003econtrol\u003c/th\u003e\n\u003cth\u003econtrol_replicate\u003c/th\u003e\n\u003cth\u003epeak_type\u003c/th\u003e\n\u003cth\u003efastq_1\u003c/th\u003e\n\u003cth\u003efastq_2\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K4me3_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003eH3K4me3\u003c/td\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003enarrow\u003c/td\u003e\n\u003ctd\u003efastq/H3K4me3_untreated-1_L1.fastq.gz\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K4me3_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003eH3K4me3\u003c/td\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003enarrow\u003c/td\u003e\n\u003ctd\u003efastq/H3K4me3_untreated-1_L2.fastq.gz\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003efastq/Input-untreated-1_fastq.gz\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e If your sample has \u003cstrong\u003emultiple lanes\u003c/strong\u003e, you can simple add a new row with the same values in all the columns except for fastq_1 (and fastq_2 if PE). 
In the table above, H3K4me3_untreated has two lanes\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eExample 2 (paired end)\u003c/h4\u003e\u003ca id=\"user-content-example-2-paired-end\" class=\"anchor\" aria-label=\"Permalink: Example 2 (paired end)\" href=\"#example-2-paired-end\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003ctable\u003e\n\u003cthead\u003e\n\u003ctr\u003e\n\u003cth\u003esample\u003c/th\u003e\n\u003cth\u003ereplicate\u003c/th\u003e\n\u003cth\u003eantibody\u003c/th\u003e\n\u003cth\u003econtrol\u003c/th\u003e\n\u003cth\u003econtrol_replicate\u003c/th\u003e\n\u003cth\u003epeak_type\u003c/th\u003e\n\u003cth\u003efastq_1\u003c/th\u003e\n\u003cth\u003efastq_2\u003c/th\u003e\n\u003c/tr\u003e\n\u003c/thead\u003e\n\u003ctbody\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K9me2_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003eH3K9me2\u003c/td\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003every-broad\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_untreated-1_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_untreated-1_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K9me2_untreated\u003c/td\u003e\n\u003ctd\u003e2\u003c/td\u003e\n\u003ctd\u003eH3K9me2\u003c/td\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003every-broad\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_untreated-2_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_untreated-2_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K9me2_EGF\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003eH3K9me2\u003c/td\u003e\n\u003ctd\u003eInput_EGF\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003every-broad\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_EGF-1_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_EGF-1_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eH3K9me2_EGF\u003c/td\u003e\n\u003ctd\u003e2\u003c/td\u003e\n\u003ctd\u003eH3K9me2\u003c/td\u003e\n\u003ctd\u003eInput_EGF\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003every-broad\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_EGF-2_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/H3K9me2_EGF-2_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eInput_untreated\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003efastq/Input-untreated-1_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/Input-untreated-1_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003ctr\u003e\n\u003ctd\u003eInput_EGF\u003c/td\u003e\n\u003ctd\u003e1\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003e\u003c/td\u003e\n\u003ctd\u003efastq/Input-EGF-1_R1.fastq.gz\u003c/td\u003e\n\u003ctd\u003efastq/Input-EGF-1_R2.fastq.gz\u003c/td\u003e\n\u003c/tr\u003e\n\u003c/tbody\u003e\n\u003c/table\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e In this case, we have two replicates per condition (untreated and EGF) and the samples are paired-end. 
However, \u003cstrong\u003emixed situations (some single and some paired-end samples) are also accepted by the pipeline.\u003c/strong\u003e\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch3 class=\"heading-element\"\u003e2. \u003cstrong\u003eConfig file\u003c/strong\u003e\n\u003c/h3\u003e\u003ca id=\"user-content-2-config-file\" class=\"anchor\" aria-label=\"Permalink: 2. Config file\" href=\"#2-config-file\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eThe last step before running the workflow is to adjust the parameters in the config file (\u003ccode\u003econfig/config.yaml\u003c/code\u003e). The file is written in YAML (Yet Another Markup Language), which is a human-readable data serialization format. It contains key-value pairs that can be nested to multiple leves.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003e\u003cem\u003eReference and exogenous (spike-in) genomes\u003c/em\u003e\u003c/h4\u003e\u003ca id=\"user-content-reference-and-exogenous-spike-in-genomes\" class=\"anchor\" aria-label=\"Permalink: Reference and exogenous (spike-in) genomes\" href=\"#reference-and-exogenous-spike-in-genomes\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eTo execute the pipeline, it\u0027s essential to specify both \u003cem\u003eendogenous\u003c/em\u003e and \u003cem\u003eexogenous\u003c/em\u003e species in the assembly field; for example, use Drosophila (dm16) as the exogenous and Human (hg38) as the endogenous species. You can find the the genome assembly on the \u003ca href=\"https://genome-euro.ucsc.edu/cgi-bin/hgGateway\" rel=\"nofollow\"\u003eUCSC Genome Browser\u003c/a\u003e.\u003c/p\u003e\n\u003cp\u003eIf a bowtie2 genome index is already available for the merged genomes (e.g. hg38 + dm16), you should input the path (ending with the index files prefix) in the \u0027resources\u0027 section of the pipeline configuration. This setup ensures proper alignment and processing of your genomic data. \u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e The index must be created with bowtie2 v2.5.3.\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-yaml\"\u003e\u003cpre\u003e\u003cspan class=\"pl-ent\"\u003eresources\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eref\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eindex\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e/path/to/hg38_dm16_merged.bowtie2.index/indexFilesPrefix\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003e ucsc genome name (e.g. 
hg38, mm10, etc)\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eassembly\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003ehg38\u003c/span\u003e\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003eblacklist regions \u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eblacklist\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e.test/data/hg38-blacklist.v2.bed\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n\n \u003cspan class=\"pl-ent\"\u003eref_spike\u003c/span\u003e:\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003e ucsc genome name (e.g. dm6, mm10, etc)\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003espike_assembly\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003edm6\u003c/span\u003e\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eIf you don\u0027t have the bowtie2 index readily available, the pipeline will generate it for you. To do so, leave empty the index field in the resources section (see below):\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-yaml\"\u003e\u003cpre\u003e\u003cspan class=\"pl-ent\"\u003eresources\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eref\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eindex\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003e ucsc genome name (e.g. hg38, mm10, etc)\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eassembly\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003ehg38\u003c/span\u003e\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003eblacklist regions \u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eblacklist\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e.test/data/hg38-blacklist.v2.bed\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n\n \u003cspan class=\"pl-ent\"\u003eref_spike\u003c/span\u003e:\n \u003cspan class=\"pl-c\"\u003e\u003cspan class=\"pl-c\"\u003e#\u003c/span\u003e ucsc genome name (e.g. hg38, mm10, etc)\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003espike_assembly\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003edm6\u003c/span\u003e\u003c/pre\u003e\u003c/div\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e For the endogenous genome, it\u0027s important to also include the path to blacklisted regions. These regions, often associated with sequencing artifacts or other anomalies, can be downloaded from the Boyle Lab\u0027s Blacklist repository on GitHub. 
You can access these blacklisted region files \u003ca href=\"https://github.com/Boyle-Lab/Blacklist/tree/master/lists\"\u003ehere\u003c/a\u003e\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003e\u003cem\u003eNormalization\u003c/em\u003e\u003c/h4\u003e\u003ca id=\"user-content-normalization\" class=\"anchor\" aria-label=\"Permalink: Normalization\" href=\"#normalization\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eIn this field you can choose the type of normalization to perform on the samples. The available options are:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eRAW\u003c/strong\u003e: This is a RPM normalization, i.e. it normalizes the read counts to the total number of reads in a sample, measured per million reads. This method is straightforward but does not account for spike-in.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eOrlando\u003c/strong\u003e: Spike-in normalization as described in \u003ca href=\"https://pubmed.ncbi.nlm.nih.gov/25437568/\" rel=\"nofollow\"\u003eOrlando et al 2014\u003c/a\u003e. Also reffered as Reference-adjusted Reads Per Million (RRPM). It does not incorporate input data in the normalization process.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eRX-Input\u003c/strong\u003e (default): RX-Input is a modified version of the Orlando normalization that accounts for the total number of reads mapped to the spike-in in both the ChIP and input samples. This approach allows for more accurate normalization by accounting for variations in both immunoprecipitation efficiency and background noise (as represented by the input). See \u003ca href=\"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6561741/#bib42\" rel=\"nofollow\"\u003eFursova et al 2019\u003c/a\u003e for further details.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eDownsampling\u003c/strong\u003e: The sample with the minimum umber of spike-in reads is used as the reference. Sample reads from all other samples are downsampled to the same level as this reference sample. This approach is applicable to datasets where the numbers of reads are similar. See \u003ca href=\"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8313745/\" rel=\"nofollow\"\u003eWu et al. 2021\u003c/a\u003e for further details.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eMedian Normalization\u003c/strong\u003e: Normalize to the median. All samples can be normalized to the median value of spike-in reads. This method is not suited for integrating datasets from different sources. See \u003ca href=\"https://www.ncbi.nlm.nih.gov/pmc/articles/PMC8313745/\" rel=\"nofollow\"\u003eWu et al. 
2021\u003c/a\u003e for further details.\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eExample:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-yaml\"\u003e\u003cpre\u003e\u003cspan class=\"pl-ent\"\u003enormalization_type\u003c/span\u003e: \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eOrlando\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\u003c/pre\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003e\u003cem\u003eDifferential Peak analysis\u003c/em\u003e\u003c/h4\u003e\u003ca id=\"user-content-differential-peak-analysis\" class=\"anchor\" aria-label=\"Permalink: Differential Peak analysis\" href=\"#differential-peak-analysis\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eSpikeFlow allows you to perform differential peaks analysis. In this case, the grouping variable for the samples will be extracted from the sample name in the \u003cem\u003esample_sheet.csv\u003c/em\u003e (after the last \u0027_\u0027). Also, if \u003ccode\u003eperform_diff_analysis: true\u003c/code\u003e, you will need to specify the contrasts (per antibody), meaning the groups that you want to compare. Please also specify the log2 fold change (log2FCcutoff) and adjusted p-value (padjust) thresholds for differential analysis.\u003c/p\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e Ensure that the group names for the differential peaks analysis and the contrast names do not contain any additional underscores (\u0027_\u0027), and that the antibody names do not contain any underscores (\u0027_\u0027).\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cp\u003eWhen differential peak analysis is enabled, SpikeFlow will create a consensus peak set per antibody and count reads on those peaks. The default behavior to build the consensus regions is to use all the peaks from all the samples (i.e., minNumSamples: 0). However, you can change this to specify the minimum number of samples a peak should be present in to be kept for the consensus peak set (minNumSamples).\u003c/p\u003e\n\u003cblockquote\u003e\n\u003cp\u003e\u003cstrong\u003e\u003cem\u003e\u003cg-emoji class=\"g-emoji\" alias=\"warning\"\u003e\u26a0\ufe0f\u003c/g-emoji\u003e NOTE:\u003c/em\u003e\u003c/strong\u003e If \u003ccode\u003euseSpikeinCalledPeaks: true\u003c/code\u003e, spike-normalized peak calling will be executed in addition to the standard peak calling. 
The resulting regions from the spike-normalized peak calling will be used for consensus peak set generation and differential analysis.\u003c/p\u003e\n\u003c/blockquote\u003e\n\u003cdiv class=\"highlight highlight-source-yaml\"\u003e\u003cpre\u003e\u003cspan class=\"pl-ent\"\u003ediffPeakAnalysis\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eperform_diff_analysis\u003c/span\u003e: \u003cspan class=\"pl-c1\"\u003etrue\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003econtrasts\u003c/span\u003e:\n \u003cspan class=\"pl-ent\"\u003eH3K4me3\u003c/span\u003e:\n - \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eEGF_vs_untreated\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003epadjust\u003c/span\u003e: \u003cspan class=\"pl-c1\"\u003e0.01\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003elog2FCcutoff\u003c/span\u003e: \u003cspan class=\"pl-c1\"\u003e1.5\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003eminNumSamples\u003c/span\u003e: \u003cspan class=\"pl-c1\"\u003e1\u003c/span\u003e\n \u003cspan class=\"pl-ent\"\u003euseSpikeinCalledPeaks\u003c/span\u003e: \u003cspan class=\"pl-c1\"\u003efalse\u003c/span\u003e\u003c/pre\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003e\u003cem\u003eRequired options\u003c/em\u003e\u003c/h4\u003e\u003ca id=\"user-content-required-options\" class=\"anchor\" aria-label=\"Permalink: Required options\" href=\"#required-options\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eWhen configuring your pipeline based on the chosen reference/endogenous genome (like mm10 or hg38), two essential options need to be set:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eeffective genome length\u003c/strong\u003e: This is required by deeptools to generate the bigWig files. The value of this parameter is used by the program to adjust the mappable portion of the genome, ensuring that the read densities represented in the BigWig files accurately reflect the underlying biological reality. You can find the possible values for this parameter in the deeptools \u003ca href=\"https://deeptools.readthedocs.io/en/latest/content/feature/effectiveGenomeSize.html\" rel=\"nofollow\"\u003edocumentation\u003c/a\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003echrom sizes\u003c/strong\u003e: To achieve accurate peak calling, it\u0027s important to use the correct chromosome sizes file. The supported genomes\u0027 chromosome sizes are available under \u003ccode\u003eresources/chrom_size\u003c/code\u003e. 
\u003cstrong\u003eMake sure to select the file that corresponds to your chosen genome\u003c/strong\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003e\u003cem\u003eOther (optional) parameters\u003c/em\u003e\u003c/h4\u003e\u003ca id=\"user-content-other-optional-parameters\" class=\"anchor\" aria-label=\"Permalink: Other (optional) parameters\" href=\"#other-optional-parameters\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003eTo direct Snakemake to save all outputs in a specific directory, add the desired path in the config file: \u003ccode\u003eoutput_path: \"path/to/directory\"\u003c/code\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eWhile splitting the BAM file into two separate ones (one endogenous and one spike-in), reads with a mapping quality below 8 are discarded. You can adjust this behavior using the bowtie2 \u003ccode\u003emap_quality\u003c/code\u003e field. If no filtering is needed, set this value to 0; otherwise, adjust it from 0 to 30 as needed. For more information on Bowtie2 MAPQ scores, see \u003ca href=\"http://biofinysics.blogspot.com/2014/05/how-does-bowtie2-assign-mapq-scores.html\" rel=\"nofollow\"\u003ehere\u003c/a\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eBroad Peak Calling:\u003c/strong\u003e For samples requiring broad peak calling, adjust the effective genome fraction as per the guidelines on this \u003ca href=\"https://github.com/biocore-ntnu/epic2/blob/master/epic2/effective_sizes/hg38_50.txt\"\u003epage\u003c/a\u003e. The \u003cem\u003e\u0027effective genome size\u0027\u003c/em\u003e mentioned on the GitHub page depends on the read length of your samples.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eVery Broad Peak Calling:\u003c/strong\u003e If you have samples that will undergo very-broad peak calling, please check the log files produced by EDD. This is because the tool might fail if it cannot accurately estimate the parameters for the peak calling. In this case, you can tweak the parameters in the EDD config file, which is in the config directory (\u003ccode\u003econfig/edd_parameters.conf\u003c/code\u003e). For more information about EDD parameter tuning, see the \u003ca href=\"https://github.com/CollasLab/edd\"\u003edocumentation\u003c/a\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eTrimming Option:\u003c/strong\u003e Trimming can be skipped by setting the respective flag to false.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eP-Value Adjustment for Peak Calling:\u003c/strong\u003e Modify the q-values for peak calling in the config file. 
This applies to different peak calling methods: narrow (macs2), broad (epic2), or very-broad (edd).\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003ePeak Annotation Threshold:\u003c/strong\u003e The default setting annotates a peak within \u00b12500 bp around the promoter region.\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eRun the workflow\u003c/h2\u003e\u003ca id=\"user-content-run-the-workflow\" class=\"anchor\" aria-label=\"Permalink: Run the workflow\" href=\"#run-the-workflow\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eTo execute the pipeline, make sure to be in the main \u003cstrong\u003eSnakemake working directory\u003c/strong\u003e, which includes subfolders like \u0027workflow\u0027, \u0027resources\u0027, and \u0027config\u0027.\u003c/p\u003e\n\u003cp\u003eThe workflow can be operated in two ways: using Conda alone, or a combination of Conda and Singularity (\u003cstrong\u003erecommended\u003c/strong\u003e).\nAfter obtaining a copy of the workflow on your machine, you can verify its proper functioning by executing one of the two commands below.\nThe \u003ccode\u003econfig\u003c/code\u003e and \u003ccode\u003esample_sheet\u003c/code\u003e files come pre-configured for a test run.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eConda and Singularity (recommended)\u003c/h4\u003e\u003ca id=\"user-content-conda-and-singularity-recommended\" class=\"anchor\" aria-label=\"Permalink: Conda and Singularity (recommended)\" href=\"#conda-and-singularity-recommended\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eSnakemake versions \u0026gt;= 8:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-shell\"\u003e\u003cpre\u003esnakemake --cores 10 --software-deployment-method conda apptainer\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eSnakemake versions \u0026gt;= 7.17 and \u0026lt; 8:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-shell\"\u003e\u003cpre\u003esnakemake -j 10 --use-conda --use-singularity\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eFirst, the singularity container will be pulled from DockerHub and then the workflow will be executed. 
To install singularity, see the \u003ca href=\"#install\"\u003einstallation\u003c/a\u003e section.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eConda only\u003c/h4\u003e\u003ca id=\"user-content-conda-only\" class=\"anchor\" aria-label=\"Permalink: Conda only\" href=\"#conda-only\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eSnakemake versions \u0026gt;= 8:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-shell\"\u003e\u003cpre\u003esnakemake --cores 10 --software-deployment-method conda\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eSnakemake versions \u0026gt;= 7.17 and \u0026lt; 8:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-shell\"\u003e\u003cpre\u003esnakemake -j 10 --use-conda\u003c/pre\u003e\u003c/div\u003e\n\u003cp\u003eThis will install all the required conda envs (it might take a while, but only for the first execution).\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch4 class=\"heading-element\"\u003eSnakemake flags\u003c/h4\u003e\u003ca id=\"user-content-snakemake-flags\" class=\"anchor\" aria-label=\"Permalink: Snakemake flags\" href=\"#snakemake-flags\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003ccode\u003e--cores\u003c/code\u003e or \u003ccode\u003e-j\u003c/code\u003e: indicates the number of cores. Adjust this number (here set to 10) based on your machine configuration.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003e-n\u003c/code\u003e: add this flag to the command line for a \"dry run,\" which allows Snakemake to display the rules that it would execute, without actually running them.\u003c/li\u003e\n\u003cli\u003e\n\u003ccode\u003e--singularity-args \"-B /shares,/home -e\"\u003c/code\u003e: add this flag only with \u003ccode\u003e--software-deployment-method conda apptainer\u003c/code\u003e or \u003ccode\u003e--use-conda --use-singularity\u003c/code\u003e. It will allow singularity to mount the specified disks (/shares and /home), which in this case should be where your working directory and files are located.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eTo execute the pipeline on an HPC cluster, please follow \u003ca href=\"https://snakemake.readthedocs.io/en/stable/tutorial/additional_features.html#cluster-execution\" rel=\"nofollow\"\u003ethese guidelines\u003c/a\u003e.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eOutput files\u003c/h2\u003e\u003ca id=\"user-content-output-files\" class=\"anchor\" aria-label=\"Permalink: Output files\" href=\"#output-files\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eAll the outputs of the workflow are stored in the \u003ccode\u003eresults\u003c/code\u003e folder. 
Additionally, in case of any errors during the workflow execution, the log files are stored within the \u003ccode\u003eresults/logs\u003c/code\u003e directory.\u003c/p\u003e\n\u003cp\u003eThe main outputs of the workflow are:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eMultiQC Report\u003c/strong\u003e\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eIf the differential peaks analysis was activated, you will find scatter/volcano plots and PCA plots in the report.\u003c/li\u003e\n\u003cli\u003ePeak Calling Data: Displays the number of peaks called per sample for each method (MACS2, EPIC2, EDD).\u003c/li\u003e\n\u003cli\u003ePeak annotation\u003c/li\u003e\n\u003cli\u003eReads Table: Per-sample reference and spike-in read counts, calculated with the normalisation method set by the user.\u003c/li\u003e\n\u003cli\u003eBasic QC with FastQC: Evaluates basic quality metrics of the sequencing data.\u003c/li\u003e\n\u003cli\u003ePhantom Peak Qual Tools: Provides NSC and RSC values, indicating the quality and reproducibility of ChIP samples. NSC measures signal-to-noise ratio, while RSC assesses enrichment strength.\u003c/li\u003e\n\u003cli\u003eFingerprint Plots: Visual representation of the sample quality, showing how reads are distributed across the genome.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003e\u003ccode\u003eresults/QC/multiqc/multiqc_report.html\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003ePeaks Differential Analysis\u003c/strong\u003e\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eIn this folder, you will find the differential peak regions and the volcano/scatter/pca plots for each antibody and contrast.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003e\u003ccode\u003eresults/differentialAnalysis\u003c/code\u003e\u003c/p\u003e\n\u003cp\u003eIf spike-in normalised peak calling was activated, you will find the results of the differential analysis in:\u003c/p\u003e\n\u003cp\u003e\u003ccode\u003eresults/differentialAnalysis/NormalisedPeaks\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003eNormalized BigWig Files\u003c/strong\u003e:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eEssential for visualizing read distribution and creating detailed heatmaps.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003e\u003ccode\u003eresults/bigWigs/\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003e\u003cstrong\u003ePeak Files and Annotation\u003c/strong\u003e:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cp\u003eProvides called peaks for each peak calling method. 
Consensus regions bed files are in \u003ccode\u003e/results/peakCalling/mergedPeaks\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003ePeak annotation using ChIPseeker, resulting in two files for promoter and distal peaks for each sample: \u003ccode\u003e/results/peakCalling/peakAnnot\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eStandard peak calling: \u003ccode\u003e/results/peakCalling/\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eSpike-in normalised peak calling: \u003ccode\u003e/results/peakCallingNorm/\u003c/code\u003e\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eTroubleshooting\u003c/h2\u003e\u003ca id=\"user-content-troubleshooting\" class=\"anchor\" aria-label=\"Permalink: Troubleshooting\" href=\"#troubleshooting\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003col\u003e\n\u003cli\u003e\n\u003cp\u003eWhen you run SpikeFlow with Singularity (\u003ccode\u003e--software-deployment-method conda apptainer\u003c/code\u003e), you might get an error if you set the \u003ccode\u003e-n\u003c/code\u003e flag. This happens ONLY at the first execution of the workflow. Remove the flag and it should work.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eThe \u003ccode\u003e--software-deployment-method conda apptainer\u003c/code\u003e option temporarily requires about 7 GB of disk space to build the image from Docker Hub. If your \u003ccode\u003e/tmp\u003c/code\u003e directory is full, you\u0027ll encounter a \u003ccode\u003eNo space left on device\u003c/code\u003e error. To avoid this, change the Singularity temp directory to a different disk by setting the \u003ccode\u003eSINGULARITY_TMPDIR\u003c/code\u003e environment variable. More details are available in the \u003ca href=\"https://docs.sylabs.io/guides/latest/user-guide/build_env.html#temporary-folders\" rel=\"nofollow\"\u003eSingularity guide on temporary folders\u003c/a\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eIn case of errors during the execution, please make sure to check the log files of the failing snakemake rule in the log folders.\u003c/p\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp\u003eFor the execution of the R scripts in SpikeFlow, the environment variable R_LIBS_SITE has to be empty; otherwise, snakemake will look in that folder for R libraries. 
To avoid this you can use \u003ccode\u003eunset R_LIBS_SITE\u003c/code\u003e.\u003c/p\u003e\n\u003c/li\u003e\n\u003c/ol\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eCitation\u003c/h2\u003e\u003ca id=\"user-content-citation\" class=\"anchor\" aria-label=\"Permalink: Citation\" href=\"#citation\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eIf you use this workflow in a paper, don\u0027t forget to give credits to the authors by citing the URL of this (original) repository:\n\u003ca href=\"https://github.com/DavideBrex/SpikeFlow\"\u003ehttps://github.com/DavideBrex/SpikeFlow\u003c/a\u003e\u003c/p\u003e\n\u003cp\u003e\u003cstrong\u003eAuthor\u003c/strong\u003e:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eDavide Bressan (\u003ca href=\"https://twitter.com/BrexDavide\" rel=\"nofollow\"\u003e@DavideBrex\u003c/a\u003e)\u003c/li\u003e\n\u003c/ul\u003e\n", - "data_format": 2, - "description": "Pipeline to analyse ChIP-Rx data, i.e ChIP-Seq with reference exogenous genome spike-in normalization", - "formatting": null, - "full_name": "DavideBrex/SpikeFlow", - "latest_release": "v1.3.0", - "linting": null, - "mandatory_flags": { - "desc": null, - "flags": null - }, - "report": true, - "software_stack_deployment": { - "conda": true, - "singularity": true, - "singularity+conda": true - }, - "standardized": true, - "stargazers_count": 7, - "subscribers_count": 1, - "topics": [ - "bioinformatics", - "bioinformatics-pipeline", - "chip-seq", - "normalisation", - "snakemake", - "snakemake-workflows", - "spike-ins" - ], - "updated_at": 1725893321.0 - }, { "config_readme": null, "data_format": 2, @@ -53318,119 +53318,6 @@ var data = "topics": [], "updated_at": 1618875925.0 }, - { - "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch1 class=\"heading-element\"\u003eConfiguration\u003c/h1\u003e\u003ca id=\"user-content-configuration\" class=\"anchor\" aria-label=\"Permalink: Configuration\" href=\"#configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eYou need one configuration file and one annotation file to run the complete workflow. You can use the provided example as starting point. 
If in doubt read the comments in the config and/or try the default values.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eproject configuration (\u003ccode\u003econfig/config.yaml\u003c/code\u003e): different for every project/dataset and configures the analyses to be performed.\u003c/li\u003e\n\u003cli\u003esample annotation (sample_annotation): CSV file consisting of five columns\n\u003cul\u003e\n\u003cli\u003ename: name of the dataset/analysis (tip: keep it short, but descriptive and distinctive).\u003c/li\u003e\n\u003cli\u003edata: path to the input Seurat object as .rds.\u003c/li\u003e\n\u003cli\u003eassay: the Seurat assay to be used (e.g., SCT or RNA).\u003c/li\u003e\n\u003cli\u003emetadata: column name of the metadata that should be used to group cells for comparison (e.g., condition or cell_type).\u003c/li\u003e\n\u003cli\u003econtrol: name of the class/level that should be used as control in the comparison (e.g., untreated) or \"ALL\" to compare every class against the rest (e.g., useful to find cluster markers; one vs all)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eSet workflow-specific \u003ccode\u003eresources\u003c/code\u003e or command line arguments (CLI) in the workflow profile \u003ccode\u003eworkflow/profiles/default.config.yaml\u003c/code\u003e, which supersedes global Snakemake profiles.\u003c/p\u003e\n", - "data_format": 2, - "description": "A Snakemake workflow for performing differential expression analyses (DEA) on (multimodal) sc/snRNA-seq data powered by the R package Seurat.", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/rules/visualize.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/rules/dea.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/rules/envs_export.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/Snakefile\": Formatted content is different from original\n[INFO] 5 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "epigen/dea_seurat", - "latest_release": "v2.0.0", - "linting": "Using workflow specific profile workflow/profiles/default for setting default command line arguments.\nFileNotFoundError in file /tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/Snakefile, line 25:\n[Errno 2] No such file or directory: \u0027/path/to/MyData_dea_seurat_annotation.csv\u0027\n File \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/Snakefile\", line 25, in \u003cmodule\u003e\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1026, in read_csv\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 620, in _read\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1620, in __init__\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1880, in _make_engine\n File 
\"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/common.py\", line 873, in get_handle\n", - "mandatory_flags": null, - "report": true, - "software_stack_deployment": { - "conda": true - }, - "standardized": true, - "stargazers_count": 11, - "subscribers_count": 5, - "topics": [ - "bioinformatics", - "biomedical-data-science", - "differential-expression-analysis", - "scrna-seq", - "single-cell", - "snakemake", - "snrna-seq", - "visualization", - "workflow", - "volcano-plot" - ], - "updated_at": 1726240463.0 - }, - { - "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch1 class=\"heading-element\"\u003eConfiguration\u003c/h1\u003e\u003ca id=\"user-content-configuration\" class=\"anchor\" aria-label=\"Permalink: Configuration\" href=\"#configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eYou need one configuration file and one annotation file to run the complete workflow. You can use the provided example as starting point. If in doubt read the comments in the config and/or try the default values.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eproject configuration (config/config.yaml): different for every project/dataset and configures the analyses to be performed\u003c/li\u003e\n\u003cli\u003esample annotation (sample_annotation): CSV file consisting of three columns\n\u003cul\u003e\n\u003cli\u003esample_name: name of the sample (tip: keep it short, but unique)\u003c/li\u003e\n\u003cli\u003epath (2 options):\n\u003cul\u003e\n\u003cli\u003e10X Genomics output: path to the directory containing the Cell Ranger output folder filtered_feature_bc_matrix/\u003c/li\u003e\n\u003cli\u003eMTX files: path to the directory containing the following 3 files:\n\u003cul\u003e\n\u003cli\u003ematrix.mtx containing the counts\u003c/li\u003e\n\u003cli\u003ebarcodes.tsv containing the cell barcodes in the first column without header (TSV)\u003c/li\u003e\n\u003cli\u003efeatures.tsv containing the feature/gene names in the first column without header (TSV)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003emetadata (optional): path to sample metadata as CSV with the first column being cell barcodes and every other coloumn metadata for the respective barcode/cell\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eSet workflow-specific \u003ccode\u003eresources\u003c/code\u003e or command line arguments (CLI) in the workflow profile \u003ccode\u003eworkflow/profiles/default.config.yaml\u003c/code\u003e, which supersedes global Snakemake profiles.\u003c/p\u003e\n", - "data_format": 2, - "description": "A Snakemake workflow for processing and visualizing (multimodal) sc/snRNA-seq data generated with 10X Genomics Kits or in the MTX matrix file format powered by the R package Seurat.", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/rules/normalize_correct_score.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/rules/envs_export.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file 
\"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/rules/process.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/rules/visualize.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/Snakefile\": Formatted content is different from original\n[INFO] 6 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "epigen/scrnaseq_processing_seurat", - "latest_release": "v2.0.0", - "linting": "Using workflow specific profile workflow/profiles/default for setting default command line arguments.\nFileNotFoundError in file /tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/Snakefile, line 24:\n[Errno 2] No such file or directory: \u0027/path/to/scrnaseq_processing_annotation.csv\u0027\n File \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/Snakefile\", line 24, in \u003cmodule\u003e\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1026, in read_csv\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 620, in _read\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1620, in __init__\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1880, in _make_engine\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/common.py\", line 873, in get_handle\n", - "mandatory_flags": null, - "report": true, - "software_stack_deployment": { - "conda": true - }, - "standardized": true, - "stargazers_count": 11, - "subscribers_count": 6, - "topics": [ - "bioinformatics", - "workflow", - "snakemake", - "scrna-seq", - "biomedical-data-science", - "cite-seq", - "sccrispr-seq", - "10xgenomics", - "snrna-seq", - "single-cell", - "visualization" - ], - "updated_at": 1726239656.0 - }, - { - "config_readme": "\u003cp\u003ePlease follow the instructions in the template config.yaml that is copied to your deployment via snakedeploy.\u003c/p\u003e\n", - "data_format": 2, - "description": "A snakemake workflow for benchmarking variant calling approaches with Genome in a Bottle (GIAB), CHM (syndip) or other custom datasets", - "formatting": null, - "full_name": "snakemake-workflows/dna-seq-benchmark", - "latest_release": "v1.11.7", - "linting": null, - "mandatory_flags": { - "desc": null, - "flags": null - }, - "report": true, - "software_stack_deployment": { - "conda": true, - "singularity": false, - "singularity+conda": false - }, - "standardized": true, - "stargazers_count": 11, - "subscribers_count": 1, - "topics": [], - "updated_at": 1726144459.0 - }, - { - "config_readme": null, - "data_format": 2, - "description": "Snakemake-based computational workflow for neoantigen prediction from diverse sources", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/quantification.smk\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/common.smk\": InvalidPython: Black error:\n```\nCannot parse: 66:0: else:\n\n(Note reported line number may be 
incorrect, as snakefmt could not determine the true line number)\n```\n\n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/common.smk\": \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/hlatyping.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/genefusion.smk\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/germline.smk\": InvalidParameterSyntax: 176resources={\n\"hapmap\": {\"known\": False , \"training\": True , \"truth\": True , \"prior\": 15.0} ,\n\"omni\": {\"known\": False , \"training\": True , \"truth\": True , \"prior\": 12.0} ,\n\"g1k\": {\"known\": False , \"training\": True , \"truth\": True , \"prior\": 10.0} ,\n\n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/germline.smk\": \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/exitron.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/custom.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/ref.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/prelim.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/annotation.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/altsplicing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/preproc.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/indel.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/prioritization.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/align.smk\": Formatted content is different from original\n[INFO] 2 file(s) raised parsing errors \ud83e\udd15\n[INFO] 14 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "ylab-hi/ScanNeo2", - "latest_release": "v0.3.0", - "linting": "Traceback (most recent call last):\n\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/snakemake/cli.py\", line 2024, in args_to_api\n any_lint = workflow_api.lint()\n ^^^^^^^^^^^^^^^^^^^\n\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/snakemake/api.py\", line 337, in _handle_no_dag\n return method(self, *args, **kwargs)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/snakemake/api.py\", line 354, in lint\n workflow.include(\n\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/snakemake/workflow.py\", line 1412, in include\n exec(compile(code, 
snakefile.get_path_or_uri(), \"exec\"), self.globals)\n\n File \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/Snakefile\", line 30, in \u003cmodule\u003e\n\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/snakemake/workflow.py\", line 1412, in include\n exec(compile(code, snakefile.get_path_or_uri(), \"exec\"), self.globals)\n\n File \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/common.smk\", line 126, in \u003cmodule\u003e\n config[\u0027data\u0027] = data_structure(config[\u0027data\u0027])\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n File \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/common.smk\", line 8, in data_structure\n config[\u0027data\u0027][\u0027dnaseq\u0027], filetype, readtype = handle_seqfiles(config[\u0027data\u0027][\u0027dnaseq\u0027])\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n File \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/common.smk\", line 64, in handle_seqfiles\n return mod_seqdata, filetype[0], readtype[0]\n ^^^^^^^^^^^^^^\n\nIndexError: list index out of range\n\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 11, - "subscribers_count": 3, - "topics": [ - "epitope", - "gene-fusion", - "indels", - "neoantigens", - "peptide", - "snakemake", - "snakemake-workflow", - "splicing", - "exitron", - "neoepitope" - ], - "updated_at": 1725251889.0 - }, { "config_readme": null, "data_format": 2, @@ -53453,6 +53340,119 @@ var data = ], "updated_at": 1725994080.0 }, + { + "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch1 class=\"heading-element\"\u003eConfiguration\u003c/h1\u003e\u003ca id=\"user-content-configuration\" class=\"anchor\" aria-label=\"Permalink: Configuration\" href=\"#configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eYou need one configuration file and one annotation file to run the complete workflow. You can use the provided example as starting point. 
If in doubt read the comments in the config and/or try the default values.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eproject configuration (\u003ccode\u003econfig/config.yaml\u003c/code\u003e): different for every project/dataset and configures the analyses to be performed.\u003c/li\u003e\n\u003cli\u003esample annotation (sample_annotation): CSV file consisting of five columns\n\u003cul\u003e\n\u003cli\u003ename: name of the dataset/analysis (tip: keep it short, but descriptive and distinctive).\u003c/li\u003e\n\u003cli\u003edata: path to the input Seurat object as .rds.\u003c/li\u003e\n\u003cli\u003eassay: the Seurat assay to be used (e.g., SCT or RNA).\u003c/li\u003e\n\u003cli\u003emetadata: column name of the metadata that should be used to group cells for comparison (e.g., condition or cell_type).\u003c/li\u003e\n\u003cli\u003econtrol: name of the class/level that should be used as control in the comparison (e.g., untreated) or \"ALL\" to compare every class against the rest (e.g., useful to find cluster markers; one vs all)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eSet workflow-specific \u003ccode\u003eresources\u003c/code\u003e or command line arguments (CLI) in the workflow profile \u003ccode\u003eworkflow/profiles/default.config.yaml\u003c/code\u003e, which supersedes global Snakemake profiles.\u003c/p\u003e\n", + "data_format": 2, + "description": "A Snakemake workflow for performing differential expression analyses (DEA) on (multimodal) sc/snRNA-seq data powered by the R package Seurat.", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/rules/visualize.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/rules/dea.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/rules/envs_export.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/Snakefile\": Formatted content is different from original\n[INFO] 5 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "epigen/dea_seurat", + "latest_release": "v2.0.0", + "linting": "Using workflow specific profile workflow/profiles/default for setting default command line arguments.\nFileNotFoundError in file /tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/Snakefile, line 25:\n[Errno 2] No such file or directory: \u0027/path/to/MyData_dea_seurat_annotation.csv\u0027\n File \"/tmp/tmpqm8kd3ip/epigen-dea_seurat-eaf7369/workflow/Snakefile\", line 25, in \u003cmodule\u003e\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1026, in read_csv\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 620, in _read\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1620, in __init__\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1880, in _make_engine\n File 
\"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/common.py\", line 873, in get_handle\n", + "mandatory_flags": null, + "report": true, + "software_stack_deployment": { + "conda": true + }, + "standardized": true, + "stargazers_count": 11, + "subscribers_count": 5, + "topics": [ + "bioinformatics", + "biomedical-data-science", + "differential-expression-analysis", + "scrna-seq", + "single-cell", + "snakemake", + "snrna-seq", + "visualization", + "workflow", + "volcano-plot" + ], + "updated_at": 1726240463.0 + }, + { + "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch1 class=\"heading-element\"\u003eConfiguration\u003c/h1\u003e\u003ca id=\"user-content-configuration\" class=\"anchor\" aria-label=\"Permalink: Configuration\" href=\"#configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eYou need one configuration file and one annotation file to run the complete workflow. You can use the provided example as starting point. If in doubt read the comments in the config and/or try the default values.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eproject configuration (config/config.yaml): different for every project/dataset and configures the analyses to be performed\u003c/li\u003e\n\u003cli\u003esample annotation (sample_annotation): CSV file consisting of three columns\n\u003cul\u003e\n\u003cli\u003esample_name: name of the sample (tip: keep it short, but unique)\u003c/li\u003e\n\u003cli\u003epath (2 options):\n\u003cul\u003e\n\u003cli\u003e10X Genomics output: path to the directory containing the Cell Ranger output folder filtered_feature_bc_matrix/\u003c/li\u003e\n\u003cli\u003eMTX files: path to the directory containing the following 3 files:\n\u003cul\u003e\n\u003cli\u003ematrix.mtx containing the counts\u003c/li\u003e\n\u003cli\u003ebarcodes.tsv containing the cell barcodes in the first column without header (TSV)\u003c/li\u003e\n\u003cli\u003efeatures.tsv containing the feature/gene names in the first column without header (TSV)\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003emetadata (optional): path to sample metadata as CSV with the first column being cell barcodes and every other coloumn metadata for the respective barcode/cell\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eSet workflow-specific \u003ccode\u003eresources\u003c/code\u003e or command line arguments (CLI) in the workflow profile \u003ccode\u003eworkflow/profiles/default.config.yaml\u003c/code\u003e, which supersedes global Snakemake profiles.\u003c/p\u003e\n", + "data_format": 2, + "description": "A Snakemake workflow for processing and visualizing (multimodal) sc/snRNA-seq data generated with 10X Genomics Kits or in the MTX matrix file format powered by the R package Seurat.", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/rules/normalize_correct_score.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/rules/envs_export.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file 
\"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/rules/process.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/rules/visualize.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/Snakefile\": Formatted content is different from original\n[INFO] 6 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "epigen/scrnaseq_processing_seurat", + "latest_release": "v2.0.0", + "linting": "Using workflow specific profile workflow/profiles/default for setting default command line arguments.\nFileNotFoundError in file /tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/Snakefile, line 24:\n[Errno 2] No such file or directory: \u0027/path/to/scrnaseq_processing_annotation.csv\u0027\n File \"/tmp/tmprvpe7bhh/epigen-scrnaseq_processing_seurat-30514ef/workflow/Snakefile\", line 24, in \u003cmodule\u003e\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1026, in read_csv\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 620, in _read\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1620, in __init__\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1880, in _make_engine\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/common.py\", line 873, in get_handle\n", + "mandatory_flags": null, + "report": true, + "software_stack_deployment": { + "conda": true + }, + "standardized": true, + "stargazers_count": 11, + "subscribers_count": 6, + "topics": [ + "bioinformatics", + "workflow", + "snakemake", + "scrna-seq", + "biomedical-data-science", + "cite-seq", + "sccrispr-seq", + "10xgenomics", + "snrna-seq", + "single-cell", + "visualization" + ], + "updated_at": 1726239656.0 + }, + { + "config_readme": "\u003cp\u003ePlease follow the instructions in the template config.yaml that is copied to your deployment via snakedeploy.\u003c/p\u003e\n", + "data_format": 2, + "description": "A snakemake workflow for benchmarking variant calling approaches with Genome in a Bottle (GIAB), CHM (syndip) or other custom datasets", + "formatting": null, + "full_name": "snakemake-workflows/dna-seq-benchmark", + "latest_release": "v1.11.7", + "linting": null, + "mandatory_flags": { + "desc": null, + "flags": null + }, + "report": true, + "software_stack_deployment": { + "conda": true, + "singularity": false, + "singularity+conda": false + }, + "standardized": true, + "stargazers_count": 11, + "subscribers_count": 1, + "topics": [], + "updated_at": 1726144459.0 + }, + { + "config_readme": null, + "data_format": 2, + "description": "Snakemake-based computational workflow for neoantigen prediction from diverse sources", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/quantification.smk\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/common.smk\": InvalidPython: Black error:\n```\nCannot parse: 66:0: else:\n\n(Note reported line number may be 
incorrect, as snakefmt could not determine the true line number)\n```\n\n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/common.smk\": \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/hlatyping.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/genefusion.smk\": Formatted content is different from original\n[DEBUG] \n[ERROR] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/germline.smk\": InvalidParameterSyntax: 176resources={\n\"hapmap\": {\"known\": False , \"training\": True , \"truth\": True , \"prior\": 15.0} ,\n\"omni\": {\"known\": False , \"training\": True , \"truth\": True , \"prior\": 12.0} ,\n\"g1k\": {\"known\": False , \"training\": True , \"truth\": True , \"prior\": 10.0} ,\n\n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/germline.smk\": \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/exitron.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/custom.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/ref.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/prelim.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/annotation.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/altsplicing.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/preproc.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/indel.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/prioritization.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/align.smk\": Formatted content is different from original\n[INFO] 2 file(s) raised parsing errors \ud83e\udd15\n[INFO] 14 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "ylab-hi/ScanNeo2", + "latest_release": "v0.3.0", + "linting": "Traceback (most recent call last):\n\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/snakemake/cli.py\", line 2024, in args_to_api\n any_lint = workflow_api.lint()\n ^^^^^^^^^^^^^^^^^^^\n\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/snakemake/api.py\", line 337, in _handle_no_dag\n return method(self, *args, **kwargs)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/snakemake/api.py\", line 354, in lint\n workflow.include(\n\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/snakemake/workflow.py\", line 1412, in include\n exec(compile(code, 
snakefile.get_path_or_uri(), \"exec\"), self.globals)\n\n File \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/Snakefile\", line 30, in \u003cmodule\u003e\n\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/snakemake/workflow.py\", line 1412, in include\n exec(compile(code, snakefile.get_path_or_uri(), \"exec\"), self.globals)\n\n File \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/common.smk\", line 126, in \u003cmodule\u003e\n config[\u0027data\u0027] = data_structure(config[\u0027data\u0027])\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n File \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/common.smk\", line 8, in data_structure\n config[\u0027data\u0027][\u0027dnaseq\u0027], filetype, readtype = handle_seqfiles(config[\u0027data\u0027][\u0027dnaseq\u0027])\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n File \"/tmp/tmp1rnuacfn/ylab-hi-ScanNeo2-8233565/workflow/rules/common.smk\", line 64, in handle_seqfiles\n return mod_seqdata, filetype[0], readtype[0]\n ^^^^^^^^^^^^^^\n\nIndexError: list index out of range\n\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 11, + "subscribers_count": 3, + "topics": [ + "epitope", + "gene-fusion", + "indels", + "neoantigens", + "peptide", + "snakemake", + "snakemake-workflow", + "splicing", + "exitron", + "neoepitope" + ], + "updated_at": 1725251889.0 + }, { "config_readme": null, "data_format": 2, @@ -54137,6 +54137,23 @@ var data = ], "updated_at": 1620029618.0 }, + { + "config_readme": null, + "data_format": 2, + "description": "A metatranscriptomic pipeline optimized for the study of microeukaryotes.", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp7kwvm82s/AlexanderLabWHOI-eukrhythmic-06b7f63/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "AlexanderLabWHOI/eukrhythmic", + "latest_release": "v1.0", + "linting": "FileNotFoundError in file /tmp/tmp7kwvm82s/AlexanderLabWHOI-eukrhythmic-06b7f63/Snakefile, line 15:\n[Errno 2] No such file or directory: \u0027input/metaT_cohen.txt\u0027\n File \"/tmp/tmp7kwvm82s/AlexanderLabWHOI-eukrhythmic-06b7f63/Snakefile\", line 15, in \u003cmodule\u003e\n File \"/tmp/tmp7kwvm82s/AlexanderLabWHOI-eukrhythmic-06b7f63/scripts/importworkspace.py\", line 77, in \u003cmodule\u003e\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1026, in read_csv\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 620, in _read\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1620, in __init__\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1880, in _make_engine\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/common.py\", line 873, in get_handle\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 15, + "subscribers_count": 5, + "topics": [], + "updated_at": 1726022302.0 + }, { "config_readme": null, "data_format": 2, @@ -54222,23 +54239,6 @@ var data = "topics": [], "updated_at": 1722501345.0 }, - { - 
"config_readme": null, - "data_format": 2, - "description": "A metatranscriptomic pipeline optimized for the study of microeukaryotes.", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp7kwvm82s/AlexanderLabWHOI-eukrhythmic-06b7f63/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "AlexanderLabWHOI/eukrhythmic", - "latest_release": "v1.0", - "linting": "FileNotFoundError in file /tmp/tmp7kwvm82s/AlexanderLabWHOI-eukrhythmic-06b7f63/Snakefile, line 15:\n[Errno 2] No such file or directory: \u0027input/metaT_cohen.txt\u0027\n File \"/tmp/tmp7kwvm82s/AlexanderLabWHOI-eukrhythmic-06b7f63/Snakefile\", line 15, in \u003cmodule\u003e\n File \"/tmp/tmp7kwvm82s/AlexanderLabWHOI-eukrhythmic-06b7f63/scripts/importworkspace.py\", line 77, in \u003cmodule\u003e\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1026, in read_csv\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 620, in _read\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1620, in __init__\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/parsers/readers.py\", line 1880, in _make_engine\n File \"/home/runner/micromamba-root/envs/snakemake-workflow-catalog/lib/python3.12/site-packages/pandas/io/common.py\", line 873, in get_handle\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 15, - "subscribers_count": 5, - "topics": [], - "updated_at": 1726022302.0 - }, { "config_readme": null, "data_format": 2, @@ -54527,6 +54527,23 @@ var data = "topics": [], "updated_at": 1647154249.0 }, + { + "config_readme": null, + "data_format": 2, + "description": "Pipeline for Cut\u0026Tag analysis ", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp1o2_0uuh/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "maxsonBraunLab/cutTag-pipeline", + "latest_release": null, + "linting": "ModuleNotFoundError in file /tmp/tmp1o2_0uuh/Snakefile, line 8:\nNo module named \u0027plotly\u0027\n File \"/tmp/tmp1o2_0uuh/Snakefile\", line 8, in \u003cmodule\u003e\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 18, + "subscribers_count": 5, + "topics": [], + "updated_at": 1725920408.0 + }, { "config_readme": null, "data_format": 2, @@ -54558,23 +54575,6 @@ var data = ], "updated_at": 1721743003.0 }, - { - "config_readme": null, - "data_format": 2, - "description": "Pipeline for Cut\u0026Tag analysis ", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmp1o2_0uuh/Snakefile\": Formatted content is different from original\n[INFO] 1 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "maxsonBraunLab/cutTag-pipeline", - "latest_release": null, - "linting": "ModuleNotFoundError in file /tmp/tmp1o2_0uuh/Snakefile, line 8:\nNo module named \u0027plotly\u0027\n File \"/tmp/tmp1o2_0uuh/Snakefile\", line 8, in \u003cmodule\u003e\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 18, - 
"subscribers_count": 5, - "topics": [], - "updated_at": 1725920408.0 - }, { "config_readme": null, "data_format": 2, @@ -54702,6 +54702,29 @@ var data = "topics": [], "updated_at": 1684328624.0 }, + { + "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch1 class=\"heading-element\"\u003eGeneral settings\u003c/h1\u003e\u003ca id=\"user-content-general-settings\" class=\"anchor\" aria-label=\"Permalink: General settings\" href=\"#general-settings\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eTo configure this workflow, modify \u003ccode\u003econfig/config.yaml\u003c/code\u003e according to your\nneeds, following the explanations provided in the file.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eSample sheet\u003c/h2\u003e\u003ca id=\"user-content-sample-sheet\" class=\"anchor\" aria-label=\"Permalink: Sample sheet\" href=\"#sample-sheet\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eThe sample sheet contains all samples to be analyzed by UnCoVar.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eAuto filling\u003c/h2\u003e\u003ca id=\"user-content-auto-filling\" class=\"anchor\" aria-label=\"Permalink: Auto filling\" href=\"#auto-filling\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eUnCoVar offers the possibility to automatically append samples to the sample\nsheet. To load your data into the workflow execute\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003esnakemake --cores all --use-conda update_sample\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003ewith the root of the UnCoVar as working directory. It is recommended to use\nthe following structure to when adding data automatically:\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003e\u251c\u2500\u2500 archive\n\u251c\u2500\u2500 incoming\n\u2514\u2500\u2500 snakemake-workflow-sars-cov2\n \u251c\u2500\u2500 data\n \u2514\u2500\u2500 ...\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003eHowever, this structure is not set in stone and can be adjusted via the\n\u003ccode\u003econfig/config.yaml\u003c/code\u003e file under \u003ccode\u003edata-handling\u003c/code\u003e. Only the following path to the\ncorresponding folders, relative to the directory of UnCoVar are needed:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cstrong\u003eincoming\u003c/strong\u003e: path of incoming data, which is moved to the data directory by\nthe preprocessing script. Defaults to \u003ccode\u003e../incoming/\u003c/code\u003e.\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003edata\u003c/strong\u003e: path to store data within the workflow. defaults to \u003ccode\u003edata/\u003c/code\u003e.\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003earchive\u003c/strong\u003e: path to archive data from the results from the analysis to.\nDefaults to \u003ccode\u003e../archive/\u003c/code\u003e.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eThe incoming directory should contain paired end reads in (compressed) FASTQ\nformat. UnCoVar automatically copies your data into the data directory and moves\nall files from incoming directory to the archive. 
After the analysis, all results\nare compressed and saved alongside the reads.\u003c/p\u003e\n\u003cp\u003eMoreover, the sample sheet is automatically updated with the new files. Please\nnote, that only the part of the filename before the first \u0027_\u0027 character is used\nas the sample name within the workflow.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eManual filling\u003c/h2\u003e\u003ca id=\"user-content-manual-filling\" class=\"anchor\" aria-label=\"Permalink: Manual filling\" href=\"#manual-filling\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eOf course, samples to be analyzed can also be added manually to the sample sheet.\nFor each sample, the a new line in \u003ccode\u003econfig/pep/samples.csv\u003c/code\u003e with the following\ncontent has to be defined:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cstrong\u003esample_name\u003c/strong\u003e: name or identifier of sample\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003efq1\u003c/strong\u003e: path to read 1 in FASTQ format\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003efq2\u003c/strong\u003e: path to read 2 in FASTQ format\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003edate\u003c/strong\u003e: sampling date of the sample\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003eis_amplicon_data\u003c/strong\u003e: indicates whether the data was generated with a\nshotgun (0) or amplicon (1) sequencing\u003c/li\u003e\n\u003c/ul\u003e\n", + "data_format": 2, + "description": "Transparent and robust SARS-CoV-2 variant calling and lineage assignment with comprehensive reporting.", + "formatting": "[DEBUG] \n[DEBUG] \n[DEBUG] \n[WARNING] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/qc.smk\": Keyword \"input\" at line 38 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/qc.smk\": Keyword \"input\" at line 62 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] \n[DEBUG] \n[DEBUG] \n\u003cunknown\u003e:1: SyntaxWarning: invalid escape sequence \u0027\\S\u0027\n[DEBUG] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/ref.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n\u003cunknown\u003e:2: SyntaxWarning: invalid escape sequence \u0027\\#\u0027\n[DEBUG] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/variant_calling.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/generate_output.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[INFO] 4 file(s) would be changed \ud83d\ude2c\n[INFO] 17 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", + "full_name": "IKIM-Essen/uncovar", + "latest_release": "v1.0.0", + "linting": null, + "mandatory_flags": null, + "report": false, + "software_stack_deployment": { + "conda": true + }, + "standardized": true, + 
"stargazers_count": 20, + "subscribers_count": 3, + "topics": [ + "sars-cov-2", + "variant-calling", + "lineage-assignment" + ], + "updated_at": 1726068377.0 + }, { "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch1 class=\"heading-element\"\u003eConfiguration\u003c/h1\u003e\u003ca id=\"user-content-configuration\" class=\"anchor\" aria-label=\"Permalink: Configuration\" href=\"#configuration\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eYou need one configuration file to configure the analyses and one annotation file describing the data to run the complete workflow. If in doubt read the comments in the config and/or try the default values. We provide a full example including data and configuration in \u003ccode\u003etest/\u003c/code\u003e as a starting point.\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003eproject configuration (\u003ccode\u003econfig/config.yaml\u003c/code\u003e): Different for every project and configures the analyses to be performed.\u003c/li\u003e\n\u003cli\u003esample annotation (annotation): CSV file consisting of four mandatory columns.\n\u003cul\u003e\n\u003cli\u003ename: A unique name for the dataset (tip: keep it short but descriptive).\u003c/li\u003e\n\u003cli\u003edata: Path to the tabular data as a comma-separated table (CSV).\u003c/li\u003e\n\u003cli\u003emetadata: Path to the metadata as a comma-separated table (CSV) with the first column being the index/identifier of each observation/sample and every other column metadata for the respective observation (either numeric or categorical, not mixed). \u003cstrong\u003eNo NaN or empty values allowed.\u003c/strong\u003e\n\u003c/li\u003e\n\u003cli\u003esamples_by_features: Boolean indicator if the data matrix is observations/samples (rows) x features (columns): 0==no, 1==yes.\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eSpecify workflow-specific \u003ccode\u003eresources\u003c/code\u003e or command line arguments (CLI) in the workflow profile \u003ccode\u003eworkflow/profiles/default.config.yaml\u003c/code\u003e, which supersedes global Snakemake profiles.\u003c/p\u003e\n", "data_format": 2, @@ -54741,29 +54764,6 @@ var data = ], "updated_at": 1726234698.0 }, - { - "config_readme": "\u003cdiv class=\"markdown-heading\"\u003e\u003ch1 class=\"heading-element\"\u003eGeneral settings\u003c/h1\u003e\u003ca id=\"user-content-general-settings\" class=\"anchor\" aria-label=\"Permalink: General settings\" href=\"#general-settings\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eTo configure this workflow, modify \u003ccode\u003econfig/config.yaml\u003c/code\u003e according to your\nneeds, following the explanations provided in the file.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eSample sheet\u003c/h2\u003e\u003ca id=\"user-content-sample-sheet\" class=\"anchor\" aria-label=\"Permalink: Sample sheet\" href=\"#sample-sheet\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eThe sample sheet contains all samples to be analyzed by UnCoVar.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eAuto filling\u003c/h2\u003e\u003ca id=\"user-content-auto-filling\" class=\"anchor\" aria-label=\"Permalink: Auto filling\" 
href=\"#auto-filling\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eUnCoVar offers the possibility to automatically append samples to the sample\nsheet. To load your data into the workflow execute\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003esnakemake --cores all --use-conda update_sample\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003ewith the root of the UnCoVar as working directory. It is recommended to use\nthe following structure to when adding data automatically:\u003c/p\u003e\n\u003cpre\u003e\u003ccode\u003e\u251c\u2500\u2500 archive\n\u251c\u2500\u2500 incoming\n\u2514\u2500\u2500 snakemake-workflow-sars-cov2\n \u251c\u2500\u2500 data\n \u2514\u2500\u2500 ...\n\u003c/code\u003e\u003c/pre\u003e\n\u003cp\u003eHowever, this structure is not set in stone and can be adjusted via the\n\u003ccode\u003econfig/config.yaml\u003c/code\u003e file under \u003ccode\u003edata-handling\u003c/code\u003e. Only the following path to the\ncorresponding folders, relative to the directory of UnCoVar are needed:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cstrong\u003eincoming\u003c/strong\u003e: path of incoming data, which is moved to the data directory by\nthe preprocessing script. Defaults to \u003ccode\u003e../incoming/\u003c/code\u003e.\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003edata\u003c/strong\u003e: path to store data within the workflow. defaults to \u003ccode\u003edata/\u003c/code\u003e.\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003earchive\u003c/strong\u003e: path to archive data from the results from the analysis to.\nDefaults to \u003ccode\u003e../archive/\u003c/code\u003e.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp\u003eThe incoming directory should contain paired end reads in (compressed) FASTQ\nformat. UnCoVar automatically copies your data into the data directory and moves\nall files from incoming directory to the archive. After the analysis, all results\nare compressed and saved alongside the reads.\u003c/p\u003e\n\u003cp\u003eMoreover, the sample sheet is automatically updated with the new files. 
Please\nnote, that only the part of the filename before the first \u0027_\u0027 character is used\nas the sample name within the workflow.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\"\u003e\u003ch2 class=\"heading-element\"\u003eManual filling\u003c/h2\u003e\u003ca id=\"user-content-manual-filling\" class=\"anchor\" aria-label=\"Permalink: Manual filling\" href=\"#manual-filling\"\u003e\u003cspan aria-hidden=\"true\" class=\"octicon octicon-link\"\u003e\u003c/span\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp\u003eOf course, samples to be analyzed can also be added manually to the sample sheet.\nFor each sample, the a new line in \u003ccode\u003econfig/pep/samples.csv\u003c/code\u003e with the following\ncontent has to be defined:\u003c/p\u003e\n\u003cul\u003e\n\u003cli\u003e\n\u003cstrong\u003esample_name\u003c/strong\u003e: name or identifier of sample\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003efq1\u003c/strong\u003e: path to read 1 in FASTQ format\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003efq2\u003c/strong\u003e: path to read 2 in FASTQ format\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003edate\u003c/strong\u003e: sampling date of the sample\u003c/li\u003e\n\u003cli\u003e\n\u003cstrong\u003eis_amplicon_data\u003c/strong\u003e: indicates whether the data was generated with a\nshotgun (0) or amplicon (1) sequencing\u003c/li\u003e\n\u003c/ul\u003e\n", - "data_format": 2, - "description": "Transparent and robust SARS-CoV-2 variant calling and lineage assignment with comprehensive reporting.", - "formatting": "[DEBUG] \n[DEBUG] \n[DEBUG] \n[WARNING] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/qc.smk\": Keyword \"input\" at line 38 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[WARNING] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/qc.smk\": Keyword \"input\" at line 62 has comments under a value.\n\tPEP8 recommends block comments appear before what they describe\n(see https://www.python.org/dev/peps/pep-0008/#id30)\n[DEBUG] \n[DEBUG] \n[DEBUG] \n\u003cunknown\u003e:1: SyntaxWarning: invalid escape sequence \u0027\\S\u0027\n[DEBUG] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/ref.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n\u003cunknown\u003e:2: SyntaxWarning: invalid escape sequence \u0027\\#\u0027\n[DEBUG] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/variant_calling.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/generate_output.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] \n[DEBUG] In file \"/tmp/tmpi_dd5cya/IKIM-Essen-uncovar-53b99c4/workflow/rules/common.smk\": Formatted content is different from original\n[DEBUG] \n[DEBUG] \n[INFO] 4 file(s) would be changed \ud83d\ude2c\n[INFO] 17 file(s) would be left unchanged \ud83c\udf89\n\nsnakefmt version: 0.10.2", - "full_name": "IKIM-Essen/uncovar", - "latest_release": "v1.0.0", - "linting": null, - "mandatory_flags": null, - "report": false, - "software_stack_deployment": { - "conda": true - }, - "standardized": true, - "stargazers_count": 20, - "subscribers_count": 3, - "topics": [ - "sars-cov-2", - "variant-calling", - "lineage-assignment" - ], - "updated_at": 1726068377.0 - }, { "config_readme": 
null, "data_format": 2, @@ -55209,6 +55209,23 @@ var data = ], "updated_at": 1718131532.0 }, + { + "config_readme": null, + "data_format": 2, + "description": "A workflow and scripts for large-scale antiSMASH analyses", + "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpdbhnk4xr/zreitz-multismash-71ddf42/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpdbhnk4xr/zreitz-multismash-71ddf42/workflow/rules/common.smk\": Formatted content is different from original\n[INFO] 2 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", + "full_name": "zreitz/multismash", + "latest_release": "v0.4.0", + "linting": "IndexError in file /tmp/tmpdbhnk4xr/zreitz-multismash-71ddf42/workflow/rules/common.smk, line 8:\nlist index out of range\n File \"/tmp/tmpdbhnk4xr/zreitz-multismash-71ddf42/workflow/Snakefile\", line 5, in \u003cmodule\u003e\n File \"/tmp/tmpdbhnk4xr/zreitz-multismash-71ddf42/workflow/rules/common.smk\", line 8, in build_paths\n", + "mandatory_flags": [], + "report": false, + "software_stack_deployment": null, + "standardized": false, + "stargazers_count": 27, + "subscribers_count": 2, + "topics": [], + "updated_at": 1725901237.0 + }, { "config_readme": null, "data_format": 2, @@ -55232,23 +55249,6 @@ var data = ], "updated_at": 1723034334.0 }, - { - "config_readme": null, - "data_format": 2, - "description": "A workflow and scripts for large-scale antiSMASH analyses", - "formatting": "[DEBUG] \n[DEBUG] In file \"/tmp/tmpdbhnk4xr/zreitz-multismash-71ddf42/workflow/Snakefile\": Formatted content is different from original\n[DEBUG] \n[DEBUG] In file \"/tmp/tmpdbhnk4xr/zreitz-multismash-71ddf42/workflow/rules/common.smk\": Formatted content is different from original\n[INFO] 2 file(s) would be changed \ud83d\ude2c\n\nsnakefmt version: 0.10.2", - "full_name": "zreitz/multismash", - "latest_release": "v0.4.0", - "linting": "IndexError in file /tmp/tmpdbhnk4xr/zreitz-multismash-71ddf42/workflow/rules/common.smk, line 8:\nlist index out of range\n File \"/tmp/tmpdbhnk4xr/zreitz-multismash-71ddf42/workflow/Snakefile\", line 5, in \u003cmodule\u003e\n File \"/tmp/tmpdbhnk4xr/zreitz-multismash-71ddf42/workflow/rules/common.smk\", line 8, in build_paths\n", - "mandatory_flags": [], - "report": false, - "software_stack_deployment": null, - "standardized": false, - "stargazers_count": 27, - "subscribers_count": 2, - "topics": [], - "updated_at": 1725901237.0 - }, { "config_readme": null, "data_format": 2, diff --git a/skips.json b/skips.json index 1f0fc24..34dfa6b 100644 --- a/skips.json +++ b/skips.json @@ -1,4 +1,184 @@ [ + { + "full_name": "Louis-XIV-bis/variant_calling_scere", + "updated_at": 1726142816.0 + }, + { + "full_name": "EstherPlomp/TNW-PhD-Newsletters", + "updated_at": 1726141103.0 + }, + { + "full_name": "ValeriaDifilippo/NAFuse", + "updated_at": 1726233432.0 + }, + { + "full_name": "unipept/peptonizer", + "updated_at": 1726132493.0 + }, + { + "full_name": "RIVM-bioinformatics/juno-mapping", + "updated_at": 1726131594.0 + }, + { + "full_name": "nextstrain/forecasts-ncov", + "updated_at": 1726101510.0 + }, + { + "full_name": "squisquater/KitFoxGBS-PipelineTest20240910", + "updated_at": 1726101317.0 + }, + { + "full_name": "jbloomlab/seqneut-pipeline", + "updated_at": 1726100217.0 + }, + { + "full_name": "PacificBiosciences/pb-metagenomics-tools", + "updated_at": 1726096933.0 + }, + { + "full_name": "brss12/APAtizer", + "updated_at": 1726093726.0 + }, + { + "full_name": "DarrianTalamantes/Mapping_and_QTL", + "updated_at": 
1726091103.0 + }, + { + "full_name": "hchulkim/replication-template", + "updated_at": 1726081836.0 + }, + { + "full_name": "jacksonh1/slim_conservation_scoring", + "updated_at": 1726075496.0 + }, + { + "full_name": "jbloomlab/Herpesvirus-Glycoprotein-Analysis", + "updated_at": 1726073601.0 + }, + { + "full_name": "jacksonh1/slim_conservation_orthogroup_generation", + "updated_at": 1726068076.0 + }, + { + "full_name": "afids/autoafids_prep", + "updated_at": 1726065830.0 + }, + { + "full_name": "kraemer-lab/GRAPEVNE", + "updated_at": 1726065712.0 + }, + { + "full_name": "gagneurlab/absplice", + "updated_at": 1726064712.0 + }, + { + "full_name": "jinghuazhao/pQTLtools", + "updated_at": 1726064103.0 + }, + { + "full_name": "santaci/SimOutbreakSelection", + "updated_at": 1726061732.0 + }, + { + "full_name": "epigen/cemm.slurm.sm", + "updated_at": 1726060133.0 + }, + { + "full_name": "ZYX593889540/BsClass", + "updated_at": 1726043555.0 + }, + { + "full_name": "snakemake/snakemake-hpc-teaching-material", + "updated_at": 1726040344.0 + }, + { + "full_name": "haydeeartaza/RareCNVsAnalysis", + "updated_at": 1726039959.0 + }, + { + "full_name": "Wytamma/snk", + "updated_at": 1726034878.0 + }, + { + "full_name": "tbrunetti/library", + "updated_at": 1726016237.0 + }, + { + "full_name": "tycheleturner/ElGenomaPequeno", + "updated_at": 1726012189.0 + }, + { + "full_name": "Schroeder-Group/Parasites-project", + "updated_at": 1726068275.0 + }, + { + "full_name": "ohsu-comp-bio/awesome-tes", + "updated_at": 1725994546.0 + }, + { + "full_name": "ralatsdc/nsforest-docker", + "updated_at": 1725988646.0 + }, + { + "full_name": "RTXteam/RTX-KG2", + "updated_at": 1725983524.0 + }, + { + "full_name": "NCI-CGR/MineSV", + "updated_at": 1725979678.0 + }, + { + "full_name": "martibosch/cookiecutter-data-snake", + "updated_at": 1725976894.0 + }, + { + "full_name": "BradleyH017/atlassing", + "updated_at": 1725974025.0 + }, + { + "full_name": "ashakru/howtocode", + "updated_at": 1726044152.0 + }, + { + "full_name": "alcrene/smttask", + "updated_at": 1725970784.0 + }, + { + "full_name": "pditommaso/awesome-pipeline", + "updated_at": 1726295269.0 + }, + { + "full_name": "Hutton-Potato-Genetics/nfHISS", + "updated_at": 1725956852.0 + }, + { + "full_name": "RasmussenLab/vamb", + "updated_at": 1725986056.0 + }, + { + "full_name": "yenon118/SQLmireba", + "updated_at": 1725935400.0 + }, + { + "full_name": "carpentries-incubator/snakemake-novice-bioinformatics", + "updated_at": 1726236307.0 + }, + { + "full_name": "Vimlendu27/super-linter", + "updated_at": 1725919645.0 + }, + { + "full_name": "liranmao/Spatial_multi_omics", + "updated_at": 1725917531.0 + }, + { + "full_name": "pelayovic/K-CHOPO", + "updated_at": 1725902943.0 + }, + { + "full_name": "matinnuhamunada/trycycler_snakemake_wrapper", + "updated_at": 1725899450.0 + }, { "full_name": "saforem2/awesome-stars", "updated_at": 1726278852.0 @@ -1727,110 +1907,6 @@ "full_name": "BlueBrain/luigi-tools", "updated_at": 1725310234.0 }, - { - "full_name": "jinghuazhao/pQTLtools", - "updated_at": 1726064103.0 - }, - { - "full_name": "santaci/SimOutbreakSelection", - "updated_at": 1726061732.0 - }, - { - "full_name": "epigen/cemm.slurm.sm", - "updated_at": 1726060133.0 - }, - { - "full_name": "ZYX593889540/BsClass", - "updated_at": 1726043555.0 - }, - { - "full_name": "snakemake/snakemake-hpc-teaching-material", - "updated_at": 1726040344.0 - }, - { - "full_name": "haydeeartaza/RareCNVsAnalysis", - "updated_at": 1726039959.0 - }, - { - "full_name": "Wytamma/snk", - "updated_at": 
1726034878.0 - }, - { - "full_name": "tbrunetti/library", - "updated_at": 1726016237.0 - }, - { - "full_name": "tycheleturner/ElGenomaPequeno", - "updated_at": 1726012189.0 - }, - { - "full_name": "Schroeder-Group/Parasites-project", - "updated_at": 1726068275.0 - }, - { - "full_name": "ohsu-comp-bio/awesome-tes", - "updated_at": 1725994546.0 - }, - { - "full_name": "ralatsdc/nsforest-docker", - "updated_at": 1725988646.0 - }, - { - "full_name": "RTXteam/RTX-KG2", - "updated_at": 1725983524.0 - }, - { - "full_name": "NCI-CGR/MineSV", - "updated_at": 1725979678.0 - }, - { - "full_name": "martibosch/cookiecutter-data-snake", - "updated_at": 1725976894.0 - }, - { - "full_name": "BradleyH017/atlassing", - "updated_at": 1725974025.0 - }, - { - "full_name": "ashakru/howtocode", - "updated_at": 1726044152.0 - }, - { - "full_name": "alcrene/smttask", - "updated_at": 1725970784.0 - }, - { - "full_name": "pditommaso/awesome-pipeline", - "updated_at": 1726127741.0 - }, - { - "full_name": "Hutton-Potato-Genetics/nfHISS", - "updated_at": 1725956852.0 - }, - { - "full_name": "RasmussenLab/vamb", - "updated_at": 1725986056.0 - }, - { - "full_name": "yenon118/SQLmireba", - "updated_at": 1725935400.0 - }, - { - "full_name": "Vimlendu27/super-linter", - "updated_at": 1725919645.0 - }, - { - "full_name": "liranmao/Spatial_multi_omics", - "updated_at": 1725917531.0 - }, - { - "full_name": "pelayovic/K-CHOPO", - "updated_at": 1725902943.0 - }, - { - "full_name": "matinnuhamunada/trycycler_snakemake_wrapper", - "updated_at": 1725899450.0 - }, { "full_name": "shahhet195/mirror-action-github-super-linter", "updated_at": 1725881678.0 @@ -1907,78 +1983,6 @@ "full_name": "rwanwork/Snakemake-Example", "updated_at": 1725654415.0 }, - { - "full_name": "Louis-XIV-bis/variant_calling_scere", - "updated_at": 1726142816.0 - }, - { - "full_name": "EstherPlomp/TNW-PhD-Newsletters", - "updated_at": 1726141103.0 - }, - { - "full_name": "ValeriaDifilippo/NAFuse", - "updated_at": 1726139107.0 - }, - { - "full_name": "unipept/peptonizer", - "updated_at": 1726132493.0 - }, - { - "full_name": "RIVM-bioinformatics/juno-mapping", - "updated_at": 1726131594.0 - }, - { - "full_name": "nextstrain/forecasts-ncov", - "updated_at": 1726101510.0 - }, - { - "full_name": "squisquater/KitFoxGBS-PipelineTest20240910", - "updated_at": 1726101317.0 - }, - { - "full_name": "jbloomlab/seqneut-pipeline", - "updated_at": 1726100217.0 - }, - { - "full_name": "PacificBiosciences/pb-metagenomics-tools", - "updated_at": 1726096933.0 - }, - { - "full_name": "brss12/APAtizer", - "updated_at": 1726093726.0 - }, - { - "full_name": "DarrianTalamantes/Mapping_and_QTL", - "updated_at": 1726091103.0 - }, - { - "full_name": "hchulkim/replication-template", - "updated_at": 1726081836.0 - }, - { - "full_name": "jacksonh1/slim_conservation_scoring", - "updated_at": 1726075496.0 - }, - { - "full_name": "jbloomlab/Herpesvirus-Glycoprotein-Analysis", - "updated_at": 1726073601.0 - }, - { - "full_name": "jacksonh1/slim_conservation_orthogroup_generation", - "updated_at": 1726068076.0 - }, - { - "full_name": "afids/autoafids_prep", - "updated_at": 1726065830.0 - }, - { - "full_name": "kraemer-lab/GRAPEVNE", - "updated_at": 1726065712.0 - }, - { - "full_name": "gagneurlab/absplice", - "updated_at": 1726064712.0 - }, { "full_name": "pivlab/INCLUDE_TR01", "updated_at": 1720465965.0 @@ -2007,10 +2011,6 @@ "full_name": "y-yigit/Dataprocessing2024", "updated_at": 1720425199.0 }, - { - "full_name": "carpentries-incubator/snakemake-novice-bioinformatics", - "updated_at": 
1723894171.0 - }, { "full_name": "nicholasloveday/heatwave_warnings", "updated_at": 1722571873.0