Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
c469dc2
Template update for nf-core/tools version 2.8
nf-core-bot Apr 28, 2023
005a516
Merge remote-tracking branch 'origin/dev' into nf-core-template-merge…
jfy133 May 4, 2023
76d894e
Fix CHANGELOG and bump all nf-core modules with new container definition
jfy133 May 4, 2023
4211168
Update docs on prodigal update after module update
jfy133 May 4, 2023
1e0bec4
Fix local non-biocontainer container strings
jfy133 May 4, 2023
ba36cc4
Missing containers
jfy133 May 4, 2023
8519513
[automated] Fix linting with Prettier
nf-core-bot May 4, 2023
9e0596a
Fix gunzip module URL again
jfy133 May 4, 2023
ff6a386
Merge branch 'nf-core-template-merge-2.8' of github.com:nf-core/mag i…
jfy133 May 4, 2023
e61d401
Comments after review
jfy133 May 4, 2023
50988c2
Fix malformed error message
jfy133 May 4, 2023
c736615
Revert to template for NfcoreSchema.groovy files
jfy133 May 4, 2023
17e57aa
Update workflows/mag.nf
jfy133 May 4, 2023
da67c16
Reduce error function specificity
jfy133 May 4, 2023
9e816c7
Merge branch 'nf-core-template-merge-2.8' of github.com:nf-core/mag i…
jfy133 May 4, 2023
e85d8f2
Merge branch 'dev' into nf-core-template-merge-2.8
jfy133 May 4, 2023
aa0f471
fix lint errors
jfy133 May 5, 2023
a048606
Fix again
jfy133 May 5, 2023
887df85
Merge branch 'nf-core-template-merge-2.8' of github.com:nf-core/mag i…
jfy133 May 5, 2023
d553497
Apply suggestions from code review
jfy133 May 8, 2023
47a31f9
Fix linting
jfy133 May 8, 2023
1353d6b
Apply suggestions from code review [skip ci]
jfy133 May 8, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 0 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -69,10 +69,6 @@ To see the the results of a test run with a full size dataset refer to the [resu
For more details about the output files and reports, please refer to the
[output documentation](https://nf-co.re/mag/output).

## Documentation

The nf-core/mag pipeline comes with documentation about the pipeline [usage](https://nf-co.re/mag/usage), [parameters](https://nf-co.re/mag/parameters) and [output](https://nf-co.re/mag/output). Detailed information about how to specify the input can be found under [input specifications](https://nf-co.re/mag/usage#input_specifications).

### Group-wise co-assembly and co-abundance computation

Each sample has an associated group ID (see [input specifications](https://nf-co.re/mag/usage#input_specifications)). This group information can be used for group-wise co-assembly with `MEGAHIT` or `SPAdes` and/or to compute co-abundances for the binning step with `MetaBAT2`. By default, group-wise co-assembly is disabled, while the computation of group-wise co-abundances is enabled. For more information about how this group information can be used see the documentation for the parameters [`--coassemble_group`](https://nf-co.re/mag/parameters#coassemble_group) and [`--binning_map_mode`](https://nf-co.re/mag/parameters#binning_map_mode).
Expand Down
4 changes: 1 addition & 3 deletions docs/usage.md
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ input: 'data'

You can also generate such `YAML`/`JSON` files via [nf-core/launch](https://nf-co.re/launch).

See the [nf-core/mag website documentation](https://nf-co.re/mag/usage#usage) for more information about pipeline specific parameters.
See the [nf-core/mag website documentation](https://nf-co.re/mag/parameters) for more information about pipeline specific parameters.

### Updating the pipeline

Expand Down Expand Up @@ -250,8 +250,6 @@ The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementatio
```

> **NB:** If you wish to periodically update individual tool-specific results (e.g. Pangolin) generated by the pipeline then you must ensure to keep the `work/` directory otherwise the `-resume` ability of the pipeline will be compromised and it will restart from scratch.
>
> > > > > > > origin/dev

### nf-core/configs

Expand Down
14 changes: 7 additions & 7 deletions lib/NfcoreSchema.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ class NfcoreSchema {
for (specifiedParam in params.keySet()) {
// nextflow params
if (nf_params.contains(specifiedParam)) {
log.error "ERROR: You used a core Nextflow option with two hyphens: '--${specifiedParam}'. Please resubmit with '-${specifiedParam}'"
Nextflow.error("ERROR: You used a core Nextflow option with two hyphens: '--${specifiedParam}'. Please resubmit with '-${specifiedParam}'")
has_error = true
}
// unexpected params
Expand Down Expand Up @@ -158,7 +158,7 @@ class NfcoreSchema {
schema.validate(params_json)
} catch (ValidationException e) {
println ''
log.error 'ERROR: Validation of pipeline parameters failed!'
Nextflow.error('ERROR: Validation of pipeline parameters failed!')
JSONObject exceptionJSON = e.toJSON()
printExceptions(exceptionJSON, params_json, log, enums)
println ''
Expand Down Expand Up @@ -340,11 +340,11 @@ class NfcoreSchema {
def m = ex_json['message'] =~ /required key \[([^\]]+)\] not found/
// Missing required param
if (m.matches()) {
log.error "* Missing required parameter: --${m[0][1]}"
Nextflow.error("* Missing required parameter: --${m[0][1]}")
}
// Other base-level error
else if (ex_json['pointerToViolation'] == '#') {
log.error "* ${ex_json['message']}"
Nextflow.error("* ${ex_json['message']}")
}
// Error with specific param
else {
Expand All @@ -353,12 +353,12 @@ class NfcoreSchema {
if (enums.containsKey(param)) {
def error_msg = "* --${param}: '${param_val}' is not a valid choice (Available choices"
if (enums[param].size() > limit) {
log.error "${error_msg} (${limit} of ${enums[param].size()}): ${enums[param][0..limit-1].join(', ')}, ... )"
Nextflow.error("${error_msg} (${limit} of ${enums[param].size()}): ${enums[param][0..limit-1].join(', ')}, ... )")
} else {
log.error "${error_msg}: ${enums[param].join(', ')})"
Nextflow.error("${error_msg}: ${enums[param].join(', ')})")
}
} else {
log.error "* --${param}: ${ex_json['message']} (${param_val})"
Nextflow.error("* --${param}: ${ex_json['message']} (${param_val})")
}
}
}
Expand Down
59 changes: 20 additions & 39 deletions lib/WorkflowMag.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -13,22 +13,18 @@ class WorkflowMag {
public static void initialise(params, log, hybrid) {
// Check if binning mapping mode is valid
if (!['all', 'group', 'own'].contains(params.binning_map_mode)) {
log.error "Invalid parameter '--binning_map_mode ${params.binning_map_mode}'. Valid values are 'all', 'group' or 'own'."
System.exit(1)
Nextflow.error("Invalid parameter '--binning_map_mode ${params.binning_map_mode}'. Valid values are 'all', 'group' or 'own'.")
}
if (params.coassemble_group && params.binning_map_mode == 'own') {
log.error "Invalid combination of parameter '--binning_map_mode own' and parameter '--coassemble_group'. Select either 'all' or 'group' mapping mode when performing group-wise co-assembly."
System.exit(1)
Nextflow.error("Invalid combination of parameter '--binning_map_mode own' and parameter '--coassemble_group'. Select either 'all' or 'group' mapping mode when performing group-wise co-assembly.")
}

// Check if specified cpus for SPAdes are available
if ( params.spades_fix_cpus > params.max_cpus ) {
log.error "Invalid parameter '--spades_fix_cpus ${params.spades_fix_cpus}', max cpus are '${params.max_cpus}'."
System.exit(1)
Nextflow.error("Invalid parameter '--spades_fix_cpus ${params.spades_fix_cpus}', max cpus are '${params.max_cpus}'.")
}
if ( params.spadeshybrid_fix_cpus > params.max_cpus ) {
log.error "Invalid parameter '--spadeshybrid_fix_cpus ${params.spadeshybrid_fix_cpus}', max cpus are '${params.max_cpus}'."
System.exit(1)
Nextflow.error("Invalid parameter '--spadeshybrid_fix_cpus ${params.spadeshybrid_fix_cpus}', max cpus are '${params.max_cpus}'.")
}
// Check if settings concerning reproducibility of used tools are consistent and print warning if not
if (params.megahit_fix_cpu_1 || params.spades_fix_cpus != -1 || params.spadeshybrid_fix_cpus != -1) {
Expand All @@ -53,8 +49,7 @@ class WorkflowMag {

// Check if parameters for host contamination removal are valid
if ( params.host_fasta && params.host_genome) {
log.error 'Both host fasta reference and iGenomes genome are specified to remove host contamination! Invalid combination, please specify either --host_fasta or --host_genome.'
System.exit(1)
Nextflow.error('Both host fasta reference and iGenomes genome are specified to remove host contamination! Invalid combination, please specify either --host_fasta or --host_genome.')
}
if ( hybrid && (params.host_fasta || params.host_genome) ) {
log.warn 'Host read removal is only applied to short reads. Long reads might be filtered indirectly by Filtlong, which is set to use read qualities estimated based on k-mer matches to the short, already filtered reads.'
Expand All @@ -64,25 +59,21 @@ class WorkflowMag {
}
if ( params.host_genome ) {
if (!params.genomes) {
log.error 'No config file containing genomes provided!'
System.exit(1)
Nextflow.error('No config file containing genomes provided!')
}
// Check if host genome exists in the config file
if (!params.genomes.containsKey(params.host_genome)) {
log.error '=============================================================================\n' +
Nextflow.error('=============================================================================\n' +
" Host genome '${params.host_genome}' not found in any config files provided to the pipeline.\n" +
' Currently, the available genome keys are:\n' +
" ${params.genomes.keySet().join(', ')}\n" +
'==================================================================================='
System.exit(1)
'===================================================================================')
}
if ( !params.genomes[params.host_genome].fasta ) {
log.error "No fasta file specified for the host genome ${params.host_genome}!"
System.exit(1)
Nextflow.error("No fasta file specified for the host genome ${params.host_genome}!")
}
if ( !params.genomes[params.host_genome].bowtie2 ) {
log.error "No Bowtie 2 index file specified for the host genome ${params.host_genome}!"
System.exit(1)
Nextflow.error("No Bowtie 2 index file specified for the host genome ${params.host_genome}!")
}
}

Expand All @@ -94,42 +85,34 @@ class WorkflowMag {
// Check more than one binner is run for bin refinement (required DAS by Tool)
// If the number of run binners (i.e., number of not-skipped) is more than one, otherwise throw an error
if ( params.refine_bins_dastool && !([ params.skip_metabat2, params.skip_maxbin2, params.skip_concoct ].count(false) > 1) ) {
log.error 'Bin refinement with --refine_bins_dastool requires at least two binners to be running (not skipped). Check input.'
System.exit(1)
Nextflow.error('Bin refinement with --refine_bins_dastool requires at least two binners to be running (not skipped). Check input.')
}

// Check that bin refinement is actually turned on if any of the refined bins are requested for downstream
if (!params.refine_bins_dastool && params.postbinning_input != 'raw_bins_only') {
log.error 'The parameter '--postbinning_input ${ params.postbinning_input }' for downstream steps can only be specified if bin refinement is activated with --refine_bins_dastool! Check input.'
System.exit(1)
Nextflow.error('The parameter '--postbinning_input ${ params.postbinning_input }' for downstream steps can only be specified if bin refinement is activated with --refine_bins_dastool! Check input.')
}

// Check if BUSCO parameters combinations are valid
if (params.skip_binqc && params.binqc_tool == 'checkm') {
log.error 'Both --skip_binqc and --binqc_tool \'checkm\' are specified! Invalid combination, please specify either --skip_binqc or --binqc_tool.'
System.exit(1)
Nextflow.error('Both --skip_binqc and --binqc_tool \'checkm\' are specified! Invalid combination, please specify either --skip_binqc or --binqc_tool.')
}
if (params.skip_binqc) {
if (params.busco_reference) {
log.error 'Both --skip_binqc and --busco_reference are specified! Invalid combination, please specify either --skip_binqc or --binqc_tool \'busco\' with --busco_reference.'
System.exit(1)
Nextflow.error('Both --skip_binqc and --busco_reference are specified! Invalid combination, please specify either --skip_binqc or --binqc_tool \'busco\' with --busco_reference.')
}
if (params.busco_download_path) {
log.error 'Both --skip_binqc and --busco_download_path are specified! Invalid combination, please specify either --skip_binqc or --binqc_tool \'busco\' with --busco_download_path.'
System.exit(1)
Nextflow.error('Both --skip_binqc and --busco_download_path are specified! Invalid combination, please specify either --skip_binqc or --binqc_tool \'busco\' with --busco_download_path.')
}
if (params.busco_auto_lineage_prok) {
log.error 'Both --skip_binqc and --busco_auto_lineage_prok are specified! Invalid combination, please specify either --skip_binqc or --binqc_tool \'busco\' with --busco_auto_lineage_prok.'
System.exit(1)
Nextflow.error('Both --skip_binqc and --busco_auto_lineage_prok are specified! Invalid combination, please specify either --skip_binqc or --binqc_tool \'busco\' with --busco_auto_lineage_prok.')
}
}
if (params.busco_reference && params.busco_download_path) {
log.error 'Both --busco_reference and --busco_download_path are specified! Invalid combination, please specify either --busco_reference or --busco_download_path.'
System.exit(1)
Nextflow.error('Both --busco_reference and --busco_download_path are specified! Invalid combination, please specify either --busco_reference or --busco_download_path.')
}
if (params.busco_auto_lineage_prok && params.busco_reference) {
log.error 'Both --busco_auto_lineage_prok and --busco_reference are specified! Invalid combination, please specify either --busco_auto_lineage_prok or --busco_reference.'
System.exit(1)
Nextflow.error('Both --busco_auto_lineage_prok and --busco_reference are specified! Invalid combination, please specify either --busco_auto_lineage_prok or --busco_reference.')
}

if (params.skip_binqc && params.gtdb) {
Expand All @@ -138,12 +121,10 @@ class WorkflowMag {

// Check if CAT parameters are valid
if (params.cat_db && params.cat_db_generate) {
log.error 'Invalid combination of parameters --cat_db and --cat_db_generate is specified! Please specify either --cat_db or --cat_db_generate.'
System.exit(1)
Nextflow.error('Invalid combination of parameters --cat_db and --cat_db_generate is specified! Please specify either --cat_db or --cat_db_generate.')
}
if (params.save_cat_db && !params.cat_db_generate) {
log.error 'Invalid parameter combination: parameter --save_cat_db specified, but not --cat_db_generate! Note also that the parameter --save_cat_db does not work in combination with --cat_db.'
System.exit(1)
Nextflow.error('Invalid parameter combination: parameter --save_cat_db specified, but not --cat_db_generate! Note also that the parameter --save_cat_db does not work in combination with --cat_db.')
}
}

Expand Down
3 changes: 1 addition & 2 deletions workflows/mag.nf
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,7 @@ if(hasExtension(params.input, "csv")){
if (row.size() == 5) {
if (row.long_reads) hybrid = true
} else {
log.error "Input samplesheet contains row with ${row.size()} column(s). Expects 5."
System.exit(1)
Nextflow.error("Input samplesheet contains row with ${row.size()} column(s). Expects 5.")
}
}
}
Expand Down