2 changes: 1 addition & 1 deletion modules/local/cramino/main.nf
@@ -1,6 +1,6 @@
process CRAMINO {
tag "$meta.id"
label 'process_single'
label 'process_low'

conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
2 changes: 1 addition & 1 deletion modules/nf-core/ascat/main.nf

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion nextflow.config
@@ -55,7 +55,7 @@ params {
ascat_penalty = 150
ascat_purity = null
ascat_longread_bins = 2000
ascat_pdf_plots = "FALSE"
ascat_pdf_plots = false
ascat_allelecounter_flags = "-f 0"
ascat_chroms = null // Only use if running on a subset of chromosomes (c(1:22, 'X', 'Y'))

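For context on the "FALSE" → false switch above: in Groovy any non-empty string is truthy, so the quoted value and the real boolean behave differently in a plain truth check. A standalone Groovy illustration, not part of the diff (whether the pipeline tests the parameter this way is an assumption):

// Standalone illustration, not pipeline code: the old quoted value is truthy in Groovy.
def as_string  = "FALSE"   // old value in nextflow.config
def as_boolean = false     // new value

assert as_string           // passes: non-empty strings (even "FALSE") are truthy
assert !as_boolean         // passes: the real boolean is falsy, as intended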
4 changes: 4 additions & 0 deletions nextflow_schema.json
@@ -166,6 +166,10 @@
"ascat_rt_files": {
"type": "string",
"description": "path to (zip) of RT files"
},
"ascat_pdf_plots": {
"type": "boolean",
"description": "Boolean for ASCAT production of pdf plots (entered as string)"
}
}
},
2 changes: 1 addition & 1 deletion ro-crate-metadata.json

Large diffs are not rendered by default.

12 changes: 7 additions & 5 deletions subworkflows/local/prepare_reference_files.nf
@@ -39,23 +39,25 @@ workflow PREPARE_REFERENCE_FILES {
ch_prepared_fasta = [ [:], fasta ]
}

// if clair3 model is specified, then download that
// otherwise use info in bam header and download that

basecall_meta.map { meta, basecall_model_meta, kinetics_meta ->
def id_new = basecall_model_meta ?: meta.clair3_model
def id_new = basecall_model_meta ? clair3_modelMap.get(basecall_model_meta) : basecall_model_meta
def meta_new = [id: id_new]
def model = (!meta.clair3_model || meta.clair3_model.toString().trim() in ['', '[]']) ? clair3_modelMap.get(basecall_model_meta) : meta.clair3_model
def download_prefix = ( basecall_model_meta == 'hifi_revio' ? "https://www.bio8.cs.hku.hk/clair3/clair3_models/" : "https://cdn.oxfordnanoportal.com/software/analysis/models/clair3" )
def url = "${download_prefix}/${model}.tar.gz"
return [ meta_new, url ]
}
.unique()
.set{ model_urls }
.set{ clair3_model_urls }

//
// MODULE: Download model
//

WGET ( model_urls )
WGET ( clair3_model_urls )

ch_versions = ch_versions.mix(WGET.out.versions)

@@ -69,7 +71,7 @@ workflow PREPARE_REFERENCE_FILES {

ch_versions = ch_versions.mix(UNTAR.out.versions)

UNTAR.out.untar.set { downloaded_model_files }
UNTAR.out.untar.set { downloaded_clair3_models }

//
// MODULE: Index the fasta
@@ -126,7 +128,7 @@ workflow PREPARE_REFERENCE_FILES {
loci_files
gc_file
rt_file
downloaded_model_files
downloaded_clair3_models

versions = ch_versions
}
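A standalone sketch of the lookup-and-download pattern the hunk above moves to, with made-up map contents and sample values (the real clair3_modelMap and metadata come from the pipeline):

// Illustrative only: map a basecall model (from the BAM header) to a Clair3 model name,
// build the download URL, and deduplicate so each model is fetched once.
workflow {
    def clair3_modelMap = [
        'dna_r10.4.1_e8.2_400bps_sup@v4.2.0': 'r1041_e82_400bps_sup_v420',   // hypothetical entry
        'hifi_revio'                        : 'hifi_revio'                   // hypothetical entry
    ]

    Channel.of(
            [ [id: 'sample1'], 'dna_r10.4.1_e8.2_400bps_sup@v4.2.0', false ],
            [ [id: 'sample2'], 'dna_r10.4.1_e8.2_400bps_sup@v4.2.0', false ] )
        .map { meta, basecall_model_meta, kinetics_meta ->
            def model  = clair3_modelMap.get(basecall_model_meta)
            def prefix = basecall_model_meta == 'hifi_revio'
                ? 'https://www.bio8.cs.hku.hk/clair3/clair3_models'
                : 'https://cdn.oxfordnanoportal.com/software/analysis/models/clair3'
            return [ [id: model], "${prefix}/${model}.tar.gz" ]
        }
        .unique()   // both samples share one model, so a single URL is emitted
        .view()
}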
55 changes: 31 additions & 24 deletions subworkflows/local/tumor_normal_happhase.nf
@@ -13,7 +13,7 @@ workflow TUMOR_NORMAL_HAPPHASE {
fai
clair3_modelMap
clairs_modelMap
downloaded_model_files
downloaded_clair3_models

main:

@@ -33,34 +33,42 @@
// Get normal bams and add platform/model info for Clair3 usage
// remove type from so that information can be merged easier later

downloaded_model_files
downloaded_clair3_models
.map{ meta, file ->
def basecall_model = meta.id
return [basecall_model, meta, file]
def clair3_model = meta.id
return [meta, clair3_model, file]
}
.set{downloaded_model_files}
.set{downloaded_clair3_models}

mixed_bams.normal
mixed_bams.normal
.map{ meta, bam, bai ->
def basecall_model = (!meta.clair3_model || meta.clair3_model.toString().trim() in ['', '[]']) ? meta.basecall_model : meta.clair3_model
def new_meta = [id: meta.id,
paired_data: meta.paired_data,
platform: meta.platform,
sex: meta.sex,
fiber: meta.fiber,
basecall_model: basecall_model,
clairS_model: meta.clairS_model]
return [ basecall_model, new_meta, bam, bai ]
clair3_model: meta.clair3_model,
clairS_model: meta.clairS_model,
clairSTO_model: meta.clairSTO_model,
kinetics: meta.kinetics]
return [ new_meta, meta.clair3_model, bam, bai ]
}
.set { normal_bams_model }

normal_bams_model
.combine(downloaded_model_files,by:0)
.map{ basecall_model, meta, bam, bai, meta2, model ->
.combine(downloaded_clair3_models,by:1)
.map {clair3_model, meta_bam, bam, bai, meta_model, model ->
def platform = (meta_bam.platform == 'pb') ? 'hifi' : meta_bam.platform
return [meta_bam, bam, bai, model, platform]
}
.set{ normal_bams }

/*
.map{ basecall_model, meta, bam, bai, meta2, model ->
def platform = (meta.platform == "pb") ? "hifi" : "ont"
return [meta, bam, bai, model, platform]
}
.set{ normal_bams }
*/

// normal_bams -> meta: [id, paired_data, platform, sex, fiber, basecall_model]
// bam: list of concatenated aligned bams
@@ -73,14 +81,15 @@
// remove type from so that information can be merged easier later
mixed_bams.tumor
.map{ meta, bam, bai ->
def basecall_model = (!meta.clair3_model || meta.clair3_model.toString().trim() in ['', '[]']) ? meta.basecall_model : meta.clair3_model
def new_meta = [id: meta.id,
paired_data: meta.paired_data,
platform: meta.platform,
sex: meta.sex,
fiber: meta.fiber,
basecall_model: basecall_model,
clairS_model: meta.clairS_model]
clair3_model: meta.clair3_model,
clairS_model: meta.clairS_model,
clairSTO_model: meta.clairSTO_model,
kinetics: meta.kinetics]
return[new_meta, bam, bai]
}
.set{ tumor_bams }
@@ -162,6 +171,7 @@

// Add phased vcf to tumour bams and type information
// mix with the normal bams

tumor_bams
.join(LONGPHASE_PHASE.out.snv_vcf)
.map { meta, bam, bai, vcf ->
@@ -211,13 +221,11 @@
)

ch_versions = ch_versions.mix(SAMTOOLS_INDEX.out.versions)

// Add index to channel
mixed_bams_vcf
.join(mixed_hapbams)
.join(SAMTOOLS_INDEX.out.bai)
.set{ mixed_hapbams }

// mixed_hapbams -> meta: [id, paired_data, platform, sex, type, fiber, basecall_model]
// bams: haplotagged aligned bams
// bais: indexes for bam files
@@ -230,8 +238,10 @@
platform: meta.platform,
sex: meta.sex,
fiber: meta.fiber,
basecall_model: meta.basecall_model,
clairS_model: meta.clairS_model]
clair3_model: meta.clair3_model,
clairS_model: meta.clairS_model,
clairSTO_model: meta.clairSTO_model,
kinetics: meta.kinetics]
return[new_meta, [[type: meta.type], hapbam], [[type: meta.type], hapbai]]
}
.groupTuple(size: 2)
@@ -245,7 +255,6 @@
}
.join(LONGPHASE_PHASE.out.snv_vcf)
.set{tumor_normal_severus}

// tumor_normal_severus -> meta: [id, paired_data, platform, sex, fiber, basecall_model]
// tumor_bam: haplotagged aligned bam for tumor
// tumor_bai: indexes for tumor bam files
Expand All @@ -256,11 +265,9 @@ workflow TUMOR_NORMAL_HAPPHASE {
// Get ClairS input channel
tumor_normal_severus
.map { meta, tumor_bam, tumor_bai, normal_bam, normal_bai, vcf ->
def model = (!meta.clairS_model || meta.clairS_model.toString().trim() in ['', '[]']) ? clairs_modelMap.get(meta.basecall_model.toString().trim()) : meta.clairS_model
return[meta , tumor_bam, tumor_bai, normal_bam, normal_bai, model]
return[meta , tumor_bam, tumor_bai, normal_bam, normal_bai, meta.clairS_model]
}
.set { clairs_input }

//
// MODULE: CLAIRS
//
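One detail worth noting about the re-keyed join above: combine(..., by: 1) matches tuples on the element at index 1 and emits that key first in the output tuple, followed by the remaining fields of the left channel and then of the right, which is why the downstream map closure lists clair3_model before meta_bam. A minimal standalone example with made-up values, not pipeline data:

// Standalone illustration of combine(by: 1); values are hypothetical.
workflow {
    bams   = Channel.of( [ [id: 'sample1'], 'r1041_e82_400bps_sup_v420', 'sample1.bam', 'sample1.bam.bai' ] )
    models = Channel.of( [ [id: 'r1041_e82_400bps_sup_v420'], 'r1041_e82_400bps_sup_v420', 'model_dir' ] )

    bams.combine(models, by: 1)
        .view()
    // Emits (key first): [r1041_e82_400bps_sup_v420, [id:sample1], sample1.bam, sample1.bam.bai,
    //                     [id:r1041_e82_400bps_sup_v420], model_dir]
}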
8 changes: 5 additions & 3 deletions subworkflows/local/tumor_only_happhase.nf
@@ -25,8 +25,7 @@ workflow TUMOR_ONLY_HAPPHASE {

tumor_bams
.map{ meta, bam, bai ->
def clairSTO_model = (!meta.clairSTO_model || meta.clairSTO_model.toString().trim() in ['', '[]']) ? clairSTO_modelMap.get(meta.basecall_model.toString().trim()) : meta.clairSTO_model
return [meta, bam, bai, clairSTO_model]
return [meta, bam, bai, meta.clairSTO_model]
}
.set{ tumor_bams }

@@ -168,7 +167,10 @@
platform: meta.platform,
sex: meta.sex,
fiber: meta.fiber,
basecall_model: meta.basecall_model]
clair3_model: meta.clair3_model,
clairS_model: meta.clairS_model,
clairSTO_model: meta.clairSTO_model,
kinetics: meta.kinetics]
return [new_meta, hap_bam, hap_bai, [], [], vcf]
}
.set{ tumor_only_severus }
24 changes: 24 additions & 0 deletions tests/default.nf.test
@@ -19,6 +19,30 @@ nextflow_pipeline {
def stable_path = getAllFilesFromDir(params.outdir, ignoreFile: 'tests/.nftignore')
assertAll(
{ assert workflow.success},
{ //files exist
assert file("$launchDir/output/sample1/variants/clair3/merge_output.vcf.gz").exists()
assert file("$launchDir/output/sample1/variants/clair3/merge_output.vcf.gz.tbi").exists()
assert file("$launchDir/output/sample1/variants/clairs/indel.vcf.gz").exists()
assert file("$launchDir/output/sample1/variants/clairs/indel.vcf.gz.tbi").exists()
assert file("$launchDir/output/sample1/variants/clairs/snvs.vcf.gz").exists()
assert file("$launchDir/output/sample1/variants/clairs/snvs.vcf.gz.tbi").exists()
assert file("$launchDir/output/sample1/variants/severus/somatic_SVs/severus_somatic.vcf.gz").exists()
assert file("$launchDir/output/sample2/variants/clair3/merge_output.vcf.gz").exists()
assert file("$launchDir/output/sample2/variants/clair3/merge_output.vcf.gz.tbi").exists()
assert file("$launchDir/output/sample2/variants/clairs/indel.vcf.gz").exists()
assert file("$launchDir/output/sample2/variants/clairs/indel.vcf.gz.tbi").exists()
assert file("$launchDir/output/sample2/variants/clairs/snvs.vcf.gz").exists()
assert file("$launchDir/output/sample2/variants/clairs/snvs.vcf.gz.tbi").exists()
assert file("$launchDir/output/sample2/variants/severus/somatic_SVs/severus_somatic.vcf.gz").exists()
assert file("$launchDir/output/sample1/bamfiles/sample1_normal.bam").exists()
assert file("$launchDir/output/sample1/bamfiles/sample1_tumor.bam").exists()
assert file("$launchDir/output/sample1/bamfiles/sample1_normal.bam.bai").exists()
assert file("$launchDir/output/sample1/bamfiles/sample1_tumor.bam.bai").exists()
assert file("$launchDir/output/sample3/variants/clairsto/indel.vcf.gz").exists()
assert file("$launchDir/output/sample3/variants/clairsto/snv.vcf.gz").exists()
assert file("$launchDir/output/sample3/variants/clairsto/somatic.vcf.gz").exists()
assert file("$launchDir/output/sample3/variants/clairsto/germline.vcf.gz").exists()
},
{ assert snapshot(
// pipeline versions.yml file for multiqc from which Nextflow version is removed because we test pipelines on multiple Nextflow versions
removeNextflowVersion("$outputDir/pipeline_info/lrsomatic_software_mqc_versions.yml"),
16 changes: 8 additions & 8 deletions tests/default.nf.test.snap
@@ -309,10 +309,10 @@
"sample1.stats:md5,da6ea076dd90b4e35ab2e23b8e6ead25",
"breakpoints_double.csv:md5,57e4f0d5509db44179e7c5044c6bc259",
"read_qual.txt:md5,1ad9d1900f8dcb291c97adc65c9d341c",
"sample2_normal.bam:md5,7e6cdc6c51235a205548be160d798d38",
"sample2_normal.bam.bai:md5,7ad774be4cf80ae968f94d760bddf3c8",
"sample2_tumor.bam:md5,9938db44490835ec0e11069b4da37547",
"sample2_tumor.bam.bai:md5,202852a62fc5bf4cf1c196676068606a",
"sample2_normal.bam:md5,554b89692e84b9ddd0615649e2b15820",
"sample2_normal.bam.bai:md5,af193f1922d90b8741212d2bf690c418",
"sample2_tumor.bam:md5,26c4e52c12aa0e874fe52ae3b729beba",
"sample2_tumor.bam.bai:md5,e9e64e13328aa3621e7976f8e3f29a78",
"sample2.mosdepth.global.dist.txt:md5,6cdc97a81a603db702cb5a113b8bc62a",
"sample2.mosdepth.summary.txt:md5,864370930ec1d695d942f4960bcf8fc6",
"sample2.flagstat:md5,cce0bb7ca79e14d8369ccc714adf4be3",
@@ -323,7 +323,7 @@
"sample2.flagstat:md5,83e7d7d922941691d2b023f0bd9655aa",
"sample2.idxstats:md5,fe8a5d1263481ea7902d575b4d95f655",
"sample2.stats:md5,defe74842396209b6cff4b32994287c7",
"breakpoints_double.csv:md5,c5a59c9ea2486f7bb9d5e40fea8f916d",
"breakpoints_double.csv:md5,b71bba578c126b9217765d854b21028a",
"read_qual.txt:md5,27edf87814aec6fa18546c8606aae4ed",
"sample3_tumor.bam:md5,2308beb1b4be1f0e1d6c8e52bd4f9266",
"sample3_tumor.bam.bai:md5,840eb3ad5ed3216a97c6a58563d4dcb1",
@@ -337,9 +337,9 @@
]
],
"meta": {
"nf-test": "0.9.2",
"nextflow": "25.10.0"
"nf-test": "0.9.3",
"nextflow": "25.10.2"
},
"timestamp": "2026-01-07T12:24:56.969201207"
"timestamp": "2026-01-09T17:08:56.493545818"
}
}