#!/usr/bin/env nextflow

+ import java.time.LocalDateTime
+
nextflow.enable.dsl = 2

+ include { hash_files } from './modules/hash_files.nf'
include { fastp } from './modules/fastp.nf'
include { fastp_json_to_csv } from './modules/fastp.nf'
include { shovill } from './modules/shovill.nf'
@@ -10,32 +13,66 @@ include { prokka } from './modules/prokka.nf'
include { bakta } from './modules/bakta.nf'
include { quast } from './modules/quast.nf'
include { parse_quast_report } from './modules/quast.nf'
+ include { pipeline_provenance } from './modules/provenance.nf'
+ include { collect_provenance } from './modules/provenance.nf'


workflow {
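+     // Capture the pipeline name, version, and start time as a pipeline-level provenance record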
+     ch_start_time = Channel.of(LocalDateTime.now())
+     ch_pipeline_name = Channel.of(workflow.manifest.name)
+     ch_pipeline_version = Channel.of(workflow.manifest.version)
+
+     ch_pipeline_provenance = pipeline_provenance(ch_pipeline_name.combine(ch_pipeline_version).combine(ch_start_time))
+
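+     // Build a per-sample [ID, R1, R2] channel, either from a samplesheet CSV or by pairing fastq files found at params.fastq_search_path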
    if (params.samplesheet_input != 'NO_FILE') {
-       ch_fastq = Channel.fromPath(params.samplesheet_input).splitCsv(header: true)
+       ch_fastq = Channel.fromPath(params.samplesheet_input).splitCsv(header: true).map{ it -> [it['ID'], it['R1'], it['R2']] }
    } else {
        ch_fastq = Channel.fromFilePairs(params.fastq_search_path, flat: true).map{ it -> [it[0].split('_')[0], it[1], it[2]] }.unique{ it -> it[0] }
    }
+
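+     // Exactly one assembler runs (shovill by default, unicycler when params.unicycler is set) and one annotator (prokka by default, bakta when params.bakta is set)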
    run_shovill = params.unicycler ? false : true
    run_unicycler = run_shovill ? false : true
    run_prokka = params.bakta ? false : true
    run_bakta = run_prokka ? false : true

    main:
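+     // Hash the input fastq pairs so their checksums can be joined into each sample's provenance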
+     hash_files(ch_fastq.map{ it -> [it[0], [it[1], it[2]]] }.combine(Channel.of("fastq-input")))
+
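+     // Trim and QC reads with fastp, then flatten its JSON report to CSV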
    fastp(ch_fastq)
    fastp_json_to_csv(fastp.out.json)
+
    if (run_shovill) {
        ch_assembly = shovill(fastp.out.trimmed_reads).assembly
    } else {
        ch_assembly = unicycler(fastp.out.trimmed_reads).assembly
    }
+
    if (run_prokka) {
        prokka(ch_assembly)
    } else if (run_bakta) {
        bakta(ch_assembly)
    }
+
    quast(ch_assembly)
-   parse_quast_report(quast.out)
+   parse_quast_report(quast.out.tsv)
+
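+     // Join per-sample provenance from each tool that ran (keyed by sample ID), append the file hashes and pipeline-level provenance, then pass the combined list to collect_provenance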
+     ch_provenance = fastp.out.provenance
+
+     if (run_shovill) {
+         ch_provenance = ch_provenance.join(shovill.out.provenance).map{ it -> [it[0], [it[1], it[2]]] }
+     }
+     if (run_unicycler) {
+         ch_provenance = ch_provenance.join(unicycler.out.provenance).map{ it -> [it[0], [it[1], it[2]]] }
+     }
+     if (run_prokka) {
+         ch_provenance = ch_provenance.join(prokka.out.provenance).map{ it -> [it[0], it[1] << it[2]] }
+     }
+     if (run_bakta) {
+         ch_provenance = ch_provenance.join(bakta.out.provenance).map{ it -> [it[0], it[1] << it[2]] }
+     }
+     ch_provenance = ch_provenance.join(quast.out.provenance).map{ it -> [it[0], it[1] << it[2]] }
+
+     ch_provenance = ch_provenance.join(hash_files.out.provenance).map{ it -> [it[0], it[1] << it[2]] }
+     ch_provenance = ch_provenance.join(ch_fastq.map{ it -> it[0] }.combine(ch_pipeline_provenance)).map{ it -> [it[0], it[1] << it[2]] }
+     collect_provenance(ch_provenance)
}