Skip to content

Commit

Permalink
Merge pull request #73 from nf-core/fix-ugly-channel-call
Browse files Browse the repository at this point in the history
Fix variable names for clarity
  • Loading branch information
jfy133 authored Jan 31, 2025
2 parents fb53338 + ddb539c commit bc915cc
Show file tree
Hide file tree
Showing 3 changed files with 25 additions and 20 deletions.
2 changes: 1 addition & 1 deletion tests/test.nf.test
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ nextflow_pipeline {
path("$outputDir/centrifuge/"),
path("$outputDir/diamond/database.dmnd"),
path("$outputDir/ganon/database.hibf"),
path("$outputDir/ganon/database.tax"),
path("$outputDir/ganon/database.tax").readLines().last().contains('Haemophilus_influenzae'),
path("$outputDir/kaiju/database.fmi"),
path("$outputDir/kraken2/database/hash.k2d"),
file("$outputDir/kraken2/database/opts.k2d").name,
Expand Down
4 changes: 2 additions & 2 deletions tests/test.nf.test.snap
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
],
"database.dmnd:md5,b2ea49ef5490c526e2c56cae19bcb462",
"database.hibf:md5,af913cecda744b02751e2f5320c35c7c",
"database.tax:md5,7a72295c6f4dae6efb16ee3423cce10d",
true,
"database.fmi:md5,9bd6adc74d13c77cf613bb190cc040e0",
"hash.k2d:md5,941118164b4bcc010593f7a7c7b30029",
"opts.k2d",
Expand All @@ -33,6 +33,6 @@
"nf-test": "0.9.2",
"nextflow": "24.10.4"
},
"timestamp": "2025-01-30T10:09:57.532131085"
"timestamp": "2025-01-31T12:21:11.452810452"
}
}
39 changes: 22 additions & 17 deletions workflows/createtaxdb.nf
Original file line number Diff line number Diff line change
Expand Up @@ -37,13 +37,13 @@ include { FASTA_BUILD_ADD_KRAKEN2_BRACKEN } from '../subworkflows/nf-core/fasta_

workflow CREATETAXDB {
take:
ch_samplesheet // channel: samplesheet read in from --input
ch_taxonomy_namesdmp // channel: taxonomy names file
ch_taxonomy_nodesdmp // channel: taxonomy nodes file
ch_accession2taxid // channel: accession2taxid file
ch_nucl2taxid // channel: nucl2taxid file
ch_prot2taxid // channel: prot2taxid file
ch_malt_mapdb // channel: maltmap file
ch_samplesheet // channel: samplesheet read in from --input
file_taxonomy_namesdmp // file: taxonomy names file
file_taxonomy_nodesdmp // file: taxonomy nodes file
file_accession2taxid // file: accession2taxid file
file_nucl2taxid // file: nucl2taxid file
file_prot2taxid // file: prot2taxid file
file_malt_mapdb // file: maltmap file

main:

Expand Down Expand Up @@ -110,7 +110,7 @@ workflow CREATETAXDB {
ch_versions = ch_versions.mix(CAT_CAT_AA.out.versions.first())
}
if ([(params.build_malt && malt_build_mode == 'protein'), params.build_kaiju].any()) {
SEQKIT_REPLACE(ch_prepped_aa_fastas_ungrouped.dump(tag: 'ungrouped'))
SEQKIT_REPLACE(ch_prepped_aa_fastas_ungrouped)
ch_versions = ch_versions.mix(SEQKIT_REPLACE.out.versions.first())
ch_prepped_aa_fastas_kaiju = SEQKIT_REPLACE.out.fastx.map { _meta, fasta -> [[id: params.dbname], fasta] }.groupTuple()
CAT_CAT_AA_KAIJU(ch_prepped_aa_fastas_kaiju)
Expand All @@ -128,7 +128,7 @@ workflow CREATETAXDB {
// Module: Run CENTRIFUGE/BUILD

if (params.build_centrifuge) {
CENTRIFUGE_BUILD(ch_singleref_for_dna, ch_nucl2taxid, ch_taxonomy_nodesdmp, ch_taxonomy_namesdmp, [])
CENTRIFUGE_BUILD(ch_singleref_for_dna, file_nucl2taxid, file_taxonomy_nodesdmp, file_taxonomy_namesdmp, [])
ch_versions = ch_versions.mix(CENTRIFUGE_BUILD.out.versions.first())
ch_centrifuge_output = CENTRIFUGE_BUILD.out.cf
}
Expand All @@ -139,7 +139,7 @@ workflow CREATETAXDB {
// MODULE: Run DIAMOND/MAKEDB

if (params.build_diamond) {
DIAMOND_MAKEDB(ch_singleref_for_aa, ch_prot2taxid, ch_taxonomy_nodesdmp, ch_taxonomy_namesdmp)
DIAMOND_MAKEDB(ch_singleref_for_aa, file_prot2taxid, file_taxonomy_nodesdmp, file_taxonomy_namesdmp)
ch_versions = ch_versions.mix(DIAMOND_MAKEDB.out.versions.first())
ch_diamond_output = DIAMOND_MAKEDB.out.db
}
Expand All @@ -165,7 +165,7 @@ workflow CREATETAXDB {
}

// Nodes must come first
ch_ganon_tax_files = Channel.fromPath(ch_taxonomy_nodesdmp).combine(Channel.fromPath(ch_taxonomy_namesdmp))
ch_ganon_tax_files = Channel.fromPath(file_taxonomy_nodesdmp).combine(Channel.fromPath(file_taxonomy_namesdmp))

GANON_BUILDCUSTOM(ch_prepped_dna_fastas, ch_ganon_input_tsv.map { _meta, tsv -> tsv }, ch_ganon_tax_files, [])
ch_versions = ch_versions.mix(GANON_BUILDCUSTOM.out.versions.first())
Expand All @@ -191,7 +191,7 @@ workflow CREATETAXDB {
// Condition is inverted because subworkflow asks if you want to 'clean' (true) or not, but pipeline says to 'keep'
if (params.build_kraken2 || params.build_bracken) {
def k2_keepintermediates = params.kraken2_keepintermediate || params.build_bracken ? false : true
FASTA_BUILD_ADD_KRAKEN2_BRACKEN(ch_singleref_for_dna, ch_taxonomy_namesdmp, ch_taxonomy_nodesdmp, ch_accession2taxid, k2_keepintermediates, params.build_bracken)
FASTA_BUILD_ADD_KRAKEN2_BRACKEN(ch_singleref_for_dna, file_taxonomy_namesdmp, file_taxonomy_nodesdmp, file_accession2taxid, k2_keepintermediates, params.build_bracken)
ch_versions = ch_versions.mix(FASTA_BUILD_ADD_KRAKEN2_BRACKEN.out.versions.first())
ch_kraken2_bracken_output = FASTA_BUILD_ADD_KRAKEN2_BRACKEN.out.db
}
Expand All @@ -202,8 +202,13 @@ workflow CREATETAXDB {
// SUBWORKFLOW: Run KRAKENUNIQ/BUILD
if (params.build_krakenuniq) {

ch_taxdmpfiles_for_krakenuniq = Channel.of(ch_taxonomy_namesdmp).combine(Channel.of(ch_taxonomy_nodesdmp)).map { [it] }
ch_input_for_krakenuniq = ch_prepped_dna_fastas.combine(ch_taxdmpfiles_for_krakenuniq).map { meta, fastas, taxdump -> [meta, fastas, taxdump, ch_nucl2taxid] }
ch_taxdmpfiles_for_krakenuniq = Channel
.of(file_taxonomy_namesdmp)
.combine(Channel.of(file_taxonomy_nodesdmp))
.map { [it] }

Channel.of(file_nucl2taxid)
ch_input_for_krakenuniq = ch_prepped_dna_fastas.combine(ch_taxdmpfiles_for_krakenuniq).map { meta, fastas, taxdump -> [meta, fastas, taxdump, file_nucl2taxid] }

KRAKENUNIQ_BUILD(ch_input_for_krakenuniq, params.krakenuniq_keepintermediate)
ch_versions = ch_versions.mix(KRAKENUNIQ_BUILD.out.versions.first())
Expand All @@ -218,11 +223,11 @@ workflow CREATETAXDB {
if (params.build_malt) {

// The map DB file comes zipped (for some reason) from MEGAN6 website
if (file(params.malt_mapdb).extension == 'zip') {
ch_malt_mapdb = UNZIP([[], params.malt_mapdb]).unzipped_archive.map { _meta, file -> [file] }
if (file_malt_mapdb.extension == 'zip') {
ch_malt_mapdb = UNZIP([[], file_malt_mapdb]).unzipped_archive.map { _meta, file -> [file] }
}
else {
ch_malt_mapdb = file(params.malt_mapdb)
ch_malt_mapdb = file(file_malt_mapdb)
}

if (malt_build_mode == 'protein') {
Expand Down

0 comments on commit bc915cc

Please sign in to comment.