Skip to content

Commit 0f89fcc

Browse files
authored
Merge pull request #30 from Juke34/spelling
Spelling and channel names
2 parents 7f265dc + 9d0efec commit 0f89fcc

File tree

8 files changed

+22
-33
lines changed

8 files changed

+22
-33
lines changed

modules/pigz.nf

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ A parallel implementation of gzip for modern
33
multi-processor, multi-core machines
44
https://zlib.net/pigz/
55
*/
6-
process fasta_uncompress {
6+
process fasta_unzip {
77
tag "$genome"
88
label 'pigz'
99

nextflow.config

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ params {
1616
}
1717

1818
// Include a base config (2 forks of 1 CPU)
19-
includeConfig "$baseDir/config/ressources/base.config"
19+
includeConfig "$baseDir/config/resources/base.config"
2020

2121
profiles {
2222

@@ -32,7 +32,7 @@ profiles {
3232
params.sifPath = "${baseDir}/sif_images"
3333
// singularity.envWhitelist = '' // Comma separated list of environment variable names to be included in the container environment.
3434
includeConfig "$baseDir/config/softwares.config"
35-
includeConfig "$baseDir/config/ressources/hpc.config"
35+
includeConfig "$baseDir/config/resources/hpc.config"
3636
}
3737

3838
debug { process.beforeScript = 'env' }
@@ -50,10 +50,10 @@ profiles {
5050
includeConfig "$baseDir/config/softwares.config"
5151
}
5252
local {
53-
includeConfig "$baseDir/config/ressources/local.config"
53+
includeConfig "$baseDir/config/resources/local.config"
5454
}
5555
test {
56-
params.aline_profiles = "${baseDir}/config/ressources/base_aline.config"
56+
params.aline_profiles = "${baseDir}/config/resources/base_aline.config"
5757
params.aligner = "STAR"
5858
params.reads = "${baseDir}/data/chr21/chr21_small_R1.fastq.gz "
5959
params.genome = "${baseDir}/data/chr21/chr21_small.fasta.gz"
@@ -62,7 +62,7 @@ profiles {
6262
params.read_type = "short_single"
6363
}
6464
test2 {
65-
params.aline_profiles = "${baseDir}/config/ressources/base_aline.config"
65+
params.aline_profiles = "${baseDir}/config/resources/base_aline.config"
6666
params.aligner = "STAR"
6767
params.reads = "${baseDir}/data/chr21/"
6868
params.genome = "${baseDir}/data/chr21/chr21_small.fasta.gz"

rain.nf

Lines changed: 16 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ params.aggregation_mode = "all"
3636
aline_profile_allowed = [ 'docker', 'singularity', 'local', 'itrop' ]
3737

3838
// Aline ressource config used
39-
params.aline_profiles = "$baseDir/config/ressources/custom_aline.config" // e.g. "docker, singularity,itrop,local"
39+
params.aline_profiles = "$baseDir/config/resources/custom_aline.config" // e.g. "docker, singularity,itrop,local"
4040

4141
// Aligner params
4242
align_tools = ['hisat2', "STAR"]
@@ -139,7 +139,7 @@ include {fastp} from './modules/fastp.nf'
139139
include {fastqc as fastqc_raw; fastqc as fastqc_ali; fastqc as fastqc_dup; fastqc as fastqc_clip} from './modules/fastqc.nf'
140140
include {gatk_markduplicates } from './modules/gatk.nf'
141141
include {multiqc} from './modules/multiqc.nf'
142-
include {fasta_uncompress} from "$baseDir/modules/pigz.nf"
142+
include {fasta_unzip} from "$baseDir/modules/pigz.nf"
143143
include {samtools_index; samtools_fasta_index; samtools_sort_bam} from './modules/samtools.nf'
144144
include {reditools2} from "./modules/reditools2.nf"
145145
include {reditools3} from "./modules/reditools3.nf"
@@ -205,16 +205,16 @@ workflow {
205205
Channel.fromPath(params.genome, checkIfExists: true)
206206
.ifEmpty { exit 1, "Cannot find genome matching ${params.genome}!\n" }
207207
.set{genome_raw}
208-
// uncompress it if needed
209-
fasta_uncompress(genome_raw)
210-
fasta_uncompress.out.genomeFa.set{genome_ch} // set genome to the output of fasta_uncompress
208+
// unzip it if needed
209+
fasta_unzip(genome_raw)
210+
fasta_unzip.out.genomeFa.set{genome} // set genome to the output of fasta_unzip
211211
// ----------------------------------------------------------------------------
212212
// --- DEAL WITH ANNOTATION ---
213-
Channel.empty().set{annotation_ch}
213+
Channel.empty().set{annotation}
214214
if (params.annotation){
215215
Channel.fromPath(params.annotation, checkIfExists: true)
216216
.ifEmpty { exit 1, "Cannot find annotation matching ${params.annotation}!\n" }
217-
.set{annotation_ch}
217+
.set{annotation}
218218
}
219219
// ----------------------------------------------------------------------------
220220
def path_csv = params.csv
@@ -368,7 +368,7 @@ workflow {
368368
"${workflow.resume?'-resume':''} -profile ${aline_profile}", // workflow opts supplied as params for flexibility
369369
"-config ${params.aline_profiles}",
370370
"--reads ${path_reads}",
371-
genome_ch,
371+
genome,
372372
"--read_type ${params.read_type}",
373373
"--aligner ${params.aligner}",
374374
"--library_type ${params.library_type}",
@@ -390,7 +390,7 @@ workflow {
390390
if (params.library_type.contains("auto") ) {
391391
log.info "Library type is set to auto, extracting it from salmon output"
392392
// GET TUPLE [ID, OUTPUT_SALMON_LIBTYPE] FILES
393-
ALIGNMENT.out.output
393+
aline_alignments_all = ALIGNMENT.out.output
394394
.map { dir ->
395395
files("$dir/salmon_libtype/*/*.json", checkIfExists: true) // Find BAM files inside the output directory
396396
}
@@ -404,7 +404,7 @@ workflow {
404404
aline_libtype = extract_libtype(aline_libtype)
405405
aline_alignments.join(aline_libtype)
406406
.map { key, val1, val2 -> tuple(key, val1, val2) }
407-
.set { aline_alignments_all }
407+
408408
} else {
409409
log.info "Library type is set to ${params.library_type}, no need to extract it from salmon output"
410410
aline_alignments_all = aline_alignments.map { name, bam -> tuple(name, bam, params.library_type) }
@@ -418,20 +418,8 @@ workflow {
418418
}
419419

420420
// call rain
421-
all_bams = aline_alignments_all.mix(sorted_bam)
421+
tuple_sample_sortedbam = aline_alignments_all.mix(sorted_bam)
422422
log.info "The following bam file(s) will be processed by RAIN:"
423-
all_bams.view()
424-
rain(all_bams, genome_ch, annotation_ch)
425-
}
426-
427-
workflow rain {
428-
429-
take:
430-
tuple_sample_sortedbam
431-
genome
432-
annnotation
433-
434-
main:
435423

436424
// STEP 1 QC with fastp ?
437425
Channel.empty().set{logs}
@@ -459,30 +447,31 @@ workflow rain {
459447
samtools_index(tuple_sample_bam_processed)
460448
// report with multiqc
461449
// multiqc(logs.collect(),params.multiqc_config)
462-
// Create a fasta index file of the reference genome
463-
samtools_fasta_index(genome.collect())
464-
465-
normalize_gxf(annnotation)
466450

467451
// Select site detection tool
468452
switch (params.edit_site_tool) {
469453
case "jacusa2":
454+
// Create a fasta index file of the reference genome
455+
samtools_fasta_index(genome.collect())
470456
jacusa2(samtools_index.out.tuple_sample_bam_bamindex, samtools_fasta_index.out.tuple_fasta_fastaindex.collect())
471457
break
472458
case "sapin":
473459
sapin(tuple_sample_bam_processed, genome.collect())
474460
break
475461
case "reditools2":
476462
reditools2(samtools_index.out.tuple_sample_bam_bamindex, genome.collect(), params.region)
463+
normalize_gxf(annotation.collect())
477464
pluviometer(reditools2.out.tuple_sample_serial_table, normalize_gxf.out.gff.collect(), "reditools2")
478465
break
479466
case "reditools3":
480467
reditools3(samtools_index.out.tuple_sample_bam_bamindex, genome.collect())
468+
normalize_gxf(annotation.collect())
481469
pluviometer(reditools3.out.tuple_sample_serial_table, normalize_gxf.out.gff.collect(), "reditools3")
482470
break
483471
default:
484472
exit(1, "Wrong edit site tool was passed")
485473
}
474+
486475
}
487476

488477

0 commit comments

Comments (0)