bring in changes from main
sorelfitzgibbon committed Jan 29, 2024
2 parents 30dfed1 + 1360225 commit 6560630
Showing 4 changed files with 44 additions and 22 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -8,12 +8,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]

### Changed
- Pass reference index to `SomaticSniper` processes
- Update GATK `v4.4.0.0` to `v4.5.0.0`

## [8.0.0-rc.1] - 2023-12-13

### Changed
- `Strelka2` retry triggered by error code `1`
- Use external `indexFile` function
- Update submodules
- Sample names sanitized for all output
- Sample names parsed from input BAMs
- Update `MuSE` to `v2.0.4`
14 changes: 1 addition & 13 deletions main.nf
@@ -8,6 +8,7 @@ include { run_validate_PipeVal } from './external/pipeline-Nextflow-module/modul
main_process: "./" //Save logs in <log_dir>/process-log/run_validate_PipeVal
]
)
include { indexFile } from './external/pipeline-Nextflow-module/modules/common/indexFile/main.nf'

log.info """\
------------------------------------
@@ -106,19 +107,6 @@ include { intersect } from './module/intersect' addParams(
params.sample_id,
[:]))

// Returns the index file for the given bam or vcf
def indexFile(bam_or_vcf) {
if(bam_or_vcf.endsWith('.bam')) {
return "${bam_or_vcf}.bai"
}
else if(bam_or_vcf.endsWith('vcf.gz')) {
return "${bam_or_vcf}.tbi"
}
else {
throw new Exception("Index file for ${bam_or_vcf} file type not supported. Use .bam or .vcf.gz files.")
}
}

Channel
.from( params.samples_to_process )
.filter{ it.sample_type == 'tumor' }
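For reference, a minimal usage sketch of the imported `indexFile` helper, based on the behaviour of the local copy removed above (the external module is assumed to keep the same contract; paths are illustrative only):

// indexFile maps a BAM to its .bai index and a bgzipped VCF to its .tbi index;
// any other extension throws an Exception (assumption: same behaviour as the removed local function).
assert indexFile('/data/example_tumor.bam') == '/data/example_tumor.bam.bai'
assert indexFile('/data/example_calls.vcf.gz') == '/data/example_calls.vcf.gz.tbi'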
3 changes: 3 additions & 0 deletions module/somaticsniper-processes.nf
@@ -24,6 +24,7 @@ process call_sSNV_SomaticSniper {
path tumor
path normal
path reference
path reference_index

output:
path "*.vcf", emit: bam_somaticsniper
@@ -66,6 +67,7 @@ process convert_BAM2Pileup_SAMtools {
input:
tuple val(type), path(bam)
path reference
path reference_index

output:
tuple val(type), path("${params.output_filename}_raw-${type}.pileup"), emit: raw_pileup
@@ -212,6 +214,7 @@ process generate_ReadCount_bam_readcount {

input:
path reference
path reference_index
path site_list
path tumor
path tumor_index
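The added `reference_index` inputs correspond to the changelog entry "Pass reference index to `SomaticSniper` processes": Nextflow only stages files declared as process inputs, so the FASTA index must be passed explicitly for tools that expect it alongside the reference. A hedged sketch of how the index path could be supplied, assuming the conventional `<reference>.fai` naming (the pipeline may instead define `params.reference_index` directly in its config):

// Assumption: derive the FASTA index path from the reference path.
// The workflow below simply forwards params.reference_index to each process.
params.reference_index = "${params.reference}.fai"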
46 changes: 37 additions & 9 deletions module/somaticsniper.nf
@@ -28,13 +28,23 @@ workflow somaticsniper {
normal_index

main:
call_sSNV_SomaticSniper(tumor_bam, normal_bam, params.reference)
call_sSNV_SomaticSniper(
tumor_bam,
normal_bam,
params.reference,
params.reference_index
)
tumor_bam_path = tumor_bam
.map{it -> ['tumor', it]}
normal_bam_path = normal_bam
.map{it -> ['normal', it]}
ch_convert_BAM2Pileup_SAMtools_bams = tumor_bam_path.mix(normal_bam_path)
convert_BAM2Pileup_SAMtools(ch_convert_BAM2Pileup_SAMtools_bams, params.reference)
ch_convert_BAM2Pileup_SAMtools_bams = tumor_bam_path
.mix(normal_bam_path)
convert_BAM2Pileup_SAMtools(
ch_convert_BAM2Pileup_SAMtools_bams,
params.reference,
params.reference_index
)
create_IndelCandidate_SAMtools(convert_BAM2Pileup_SAMtools.out.raw_pileup)

// tumor and normal need to be processed separately.
@@ -47,12 +57,30 @@
}
.set { ch_snpfilter }

apply_NormalIndelFilter_SomaticSniper(call_sSNV_SomaticSniper.out.bam_somaticsniper, ch_snpfilter.normal)
apply_TumorIndelFilter_SomaticSniper(apply_NormalIndelFilter_SomaticSniper.out.vcf_normal, ch_snpfilter.tumor)
create_ReadCountPosition_SomaticSniper(apply_TumorIndelFilter_SomaticSniper.out.vcf_tumor)
generate_ReadCount_bam_readcount(params.reference,create_ReadCountPosition_SomaticSniper.out.snp_positions, tumor_bam, tumor_index)
filter_FalsePositive_SomaticSniper(apply_TumorIndelFilter_SomaticSniper.out.vcf_tumor, generate_ReadCount_bam_readcount.out.readcount)
call_HighConfidenceSNV_SomaticSniper(filter_FalsePositive_SomaticSniper.out.fp_pass)
apply_NormalIndelFilter_SomaticSniper(
call_sSNV_SomaticSniper.out.bam_somaticsniper,
ch_snpfilter.normal
)
apply_TumorIndelFilter_SomaticSniper(
apply_NormalIndelFilter_SomaticSniper.out.vcf_normal,
ch_snpfilter.tumor
)
create_ReadCountPosition_SomaticSniper(
apply_TumorIndelFilter_SomaticSniper.out.vcf_tumor
)
generate_ReadCount_bam_readcount(
params.reference,
params.reference_index,
create_ReadCountPosition_SomaticSniper.out.snp_positions,
tumor_bam,
tumor_index
)
filter_FalsePositive_SomaticSniper(
apply_TumorIndelFilter_SomaticSniper.out.vcf_tumor,
generate_ReadCount_bam_readcount.out.readcount
)
call_HighConfidenceSNV_SomaticSniper(
filter_FalsePositive_SomaticSniper.out.fp_pass
)
// combining to delay compression until after filtering step
compress_file_bzip2(
generate_ReadCount_bam_readcount.out.readcount
