diff --git a/.editorconfig b/.editorconfig
index b6b319077..9b990088a 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -22,3 +22,11 @@ indent_size = unset
[/assets/email*]
indent_size = unset
+
+# ignore Readme
+[README.md]
+indent_style = unset
+
+# ignore python
+[*.{py}]
+indent_style = unset
diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml
index eafcf567b..da5094bd9 100644
--- a/.github/workflows/awsfulltest.yml
+++ b/.github/workflows/awsfulltest.yml
@@ -28,7 +28,7 @@ jobs:
}
profiles: test_full
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: Tower debug log file
path: |
diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml
index 7a4f39dec..0b3d57fbf 100644
--- a/.github/workflows/awstest.yml
+++ b/.github/workflows/awstest.yml
@@ -25,7 +25,7 @@ jobs:
}
profiles: test
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: Tower debug log file
path: |
diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml
index 2a9f5e0ab..edae2a083 100644
--- a/.github/workflows/branch.yml
+++ b/.github/workflows/branch.yml
@@ -19,7 +19,7 @@ jobs:
# NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets
- name: Post PR comment
if: failure()
- uses: mshick/add-pr-comment@v1
+ uses: mshick/add-pr-comment@v2
with:
message: |
## This PR is against the `master` branch :x:
diff --git a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml
index 694e90ecb..e37cfda5c 100644
--- a/.github/workflows/clean-up.yml
+++ b/.github/workflows/clean-up.yml
@@ -10,7 +10,7 @@ jobs:
issues: write
pull-requests: write
steps:
- - uses: actions/stale@v7
+ - uses: actions/stale@v9
with:
stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days."
stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful."
diff --git a/.github/workflows/download_pipeline.yml b/.github/workflows/download_pipeline.yml
new file mode 100644
index 000000000..8a3300450
--- /dev/null
+++ b/.github/workflows/download_pipeline.yml
@@ -0,0 +1,67 @@
+name: Test successful pipeline download with 'nf-core download'
+
+# Run the workflow when:
+# - dispatched manually
+# - when a PR is opened or reopened to master branch
+# - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev.
+on:
+ workflow_dispatch:
+ pull_request:
+ types:
+ - opened
+ branches:
+ - master
+ pull_request_target:
+ branches:
+ - master
+
+env:
+ NXF_ANSI_LOG: false
+
+jobs:
+ download:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Install Nextflow
+ uses: nf-core/setup-nextflow@v1
+
+ - uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ architecture: "x64"
+ - uses: eWaterCycle/setup-singularity@v7
+ with:
+ singularity-version: 3.8.3
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install git+https://github.com/nf-core/tools.git@dev
+
+ - name: Get the repository name and current branch set as environment variable
+ run: |
+ echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV}
+ echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV}
+ echo "REPO_BRANCH=${GITHUB_REF#refs/heads/}" >> ${GITHUB_ENV}
+
+ - name: Download the pipeline
+ env:
+ NXF_SINGULARITY_CACHEDIR: ./
+ run: |
+ nf-core download ${{ env.REPO_LOWERCASE }} \
+ --revision ${{ env.REPO_BRANCH }} \
+ --outdir ./${{ env.REPOTITLE_LOWERCASE }} \
+ --compress "none" \
+ --container-system 'singularity' \
+ --container-library "quay.io" -l "docker.io" -l "ghcr.io" \
+ --container-cache-utilisation 'amend' \
+ --download-configuration
+
+ - name: Inspect download
+ run: tree ./${{ env.REPOTITLE_LOWERCASE }}
+
+ - name: Run the downloaded pipeline
+ env:
+ NXF_SINGULARITY_CACHEDIR: ./
+ NXF_SINGULARITY_HOME_MOUNT: true
+ run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml
index 9781ad7c0..a3a31ac93 100644
--- a/.github/workflows/fix-linting.yml
+++ b/.github/workflows/fix-linting.yml
@@ -4,7 +4,7 @@ on:
types: [created]
jobs:
- deploy:
+ fix-linting:
# Only run if comment is on a PR with the main repo, and if it contains the magic keywords
if: >
contains(github.event.comment.html_url, '/pull/') &&
@@ -13,10 +13,17 @@ jobs:
runs-on: ubuntu-latest
steps:
# Use the @nf-core-bot token to check out so we can push later
- - uses: actions/checkout@v4
+ - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
with:
token: ${{ secrets.nf_core_bot_auth_token }}
+ # indication that the linting is being fixed
+ - name: React on comment
+ uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: eyes
+
# Action runs on the issue comment, so we don't get the PR by default
# Use the gh cli to check out the PR
- name: Checkout Pull Request
@@ -24,32 +31,59 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}
- - uses: actions/setup-node@v4
+ # Install and run pre-commit
+ - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
+ with:
+ python-version: 3.11
- - name: Install Prettier
- run: npm install -g prettier @prettier/plugin-php
+ - name: Install pre-commit
+ run: pip install pre-commit
- # Check that we actually need to fix something
- - name: Run 'prettier --check'
- id: prettier_status
- run: |
- if prettier --check ${GITHUB_WORKSPACE}; then
- echo "result=pass" >> $GITHUB_OUTPUT
- else
- echo "result=fail" >> $GITHUB_OUTPUT
- fi
+ - name: Run pre-commit
+ id: pre-commit
+ run: pre-commit run --all-files
+ continue-on-error: true
- - name: Run 'prettier --write'
- if: steps.prettier_status.outputs.result == 'fail'
- run: prettier --write ${GITHUB_WORKSPACE}
+ # indication that the linting has finished
+ - name: react if linting finished successfully
+ if: steps.pre-commit.outcome == 'success'
+ uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: "+1"
- name: Commit & push changes
- if: steps.prettier_status.outputs.result == 'fail'
+ id: commit-and-push
+ if: steps.pre-commit.outcome == 'failure'
run: |
git config user.email "core@nf-co.re"
git config user.name "nf-core-bot"
git config push.default upstream
git add .
git status
- git commit -m "[automated] Fix linting with Prettier"
+ git commit -m "[automated] Fix code linting"
git push
+
+ - name: react if linting errors were fixed
+ id: react-if-fixed
+ if: steps.commit-and-push.outcome == 'success'
+ uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: hooray
+
+ - name: react if linting errors were not fixed
+ if: steps.commit-and-push.outcome == 'failure'
+ uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: confused
+
+ - name: react if linting errors were not fixed
+ if: steps.commit-and-push.outcome == 'failure'
+ uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ with:
+ issue-number: ${{ github.event.issue.number }}
+ body: |
+ @${{ github.actor }} I tried to fix the linting errors, but it didn't work. Please fix them manually.
+ See [CI log](https://github.com/nf-core/ampliseq/actions/runs/${{ github.run_id }}) for more details.
diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml
index 905c58e44..81cd098e9 100644
--- a/.github/workflows/linting.yml
+++ b/.github/workflows/linting.yml
@@ -11,61 +11,22 @@ on:
types: [published]
jobs:
- EditorConfig:
+ pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- - uses: actions/setup-node@v4
-
- - name: Install editorconfig-checker
- run: npm install -g editorconfig-checker
-
- - name: Run ECLint check
- run: editorconfig-checker -exclude README.md $(find .* -type f | grep -v '.git\|.py\|.md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile')
-
- Prettier:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - uses: actions/setup-node@v4
-
- - name: Install Prettier
- run: npm install -g prettier
-
- - name: Run Prettier --check
- run: prettier --check ${GITHUB_WORKSPACE}
-
- PythonBlack:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - name: Check code lints with Black
- uses: psf/black@stable
-
- # If the above check failed, post a comment on the PR explaining the failure
- - name: Post PR comment
- if: failure()
- uses: mshick/add-pr-comment@v1
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
with:
- message: |
- ## Python linting (`black`) is failing
-
- To keep the code consistent with lots of contributors, we run automated code consistency checks.
- To fix this CI test, please run:
-
- * Install [`black`](https://black.readthedocs.io/en/stable/): `pip install black`
- * Fix formatting errors in your pipeline: `black .`
-
- Once you push these changes the test should pass, and you can hide this comment :+1:
+ python-version: 3.11
+ cache: "pip"
- We highly recommend setting up Black in your code editor so that this formatting is done automatically on save. Ask about it on Slack for help!
+ - name: Install pre-commit
+ run: pip install pre-commit
- Thanks again for your contribution!
- repo-token: ${{ secrets.GITHUB_TOKEN }}
- allow-repeats: false
+ - name: Run pre-commit
+ run: pre-commit run --all-files
nf-core:
runs-on: ubuntu-latest
@@ -76,7 +37,7 @@ jobs:
- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
- - uses: actions/setup-python@v4
+ - uses: actions/setup-python@v5
with:
python-version: "3.11"
architecture: "x64"
@@ -99,7 +60,7 @@ jobs:
- name: Upload linting log file artifact
if: ${{ always() }}
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: linting-logs
path: |
diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml
index 0bbcd30f2..147bcd10c 100644
--- a/.github/workflows/linting_comment.yml
+++ b/.github/workflows/linting_comment.yml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download lint results
- uses: dawidd6/action-download-artifact@v2
+ uses: dawidd6/action-download-artifact@v3
with:
workflow: linting.yml
workflow_conclusion: completed
diff --git a/.github/workflows/release-announcements.yml b/.github/workflows/release-announcements.yml
index 6ad339277..21ac3f068 100644
--- a/.github/workflows/release-announcements.yml
+++ b/.github/workflows/release-announcements.yml
@@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/setup-python@v4
+ - uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install dependencies
@@ -56,7 +56,7 @@ jobs:
bsky-post:
runs-on: ubuntu-latest
steps:
- - uses: zentered/bluesky-post-action@v0.0.2
+ - uses: zentered/bluesky-post-action@v0.1.0
with:
post: |
Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
diff --git a/.gitpod.yml b/.gitpod.yml
index acf726953..363d5b1d4 100644
--- a/.gitpod.yml
+++ b/.gitpod.yml
@@ -7,6 +7,7 @@ tasks:
- name: unset JAVA_TOOL_OPTIONS
command: |
unset JAVA_TOOL_OPTIONS
+
vscode:
extensions: # based on nf-core.nf-core-extensionpack
- codezombiech.gitignore # Language support for .gitignore files
diff --git a/.nf-core.yml b/.nf-core.yml
index 92b7f03b0..9c60be609 100644
--- a/.nf-core.yml
+++ b/.nf-core.yml
@@ -4,4 +4,13 @@ lint:
- .gitattributes
files_exist:
- conf/igenomes.config
+ nextflow_config:
+ - config_defaults:
+ - params.report_template
+ - params.report_css
+ - params.report_logo
actions_ci: False
+update:
+ https://github.com/nf-core/modules.git:
+ nf-core:
+ mafft: "feb29be775d9e41750180539e9a3bdce801d0609"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0c31cdb99..af57081f6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,5 +1,10 @@
repos:
- repo: https://github.com/pre-commit/mirrors-prettier
- rev: "v2.7.1"
+ rev: "v3.1.0"
hooks:
- id: prettier
+ - repo: https://github.com/editorconfig-checker/editorconfig-checker.python
+ rev: "2.7.3"
+ hooks:
+ - id: editorconfig-checker
+ alias: ec
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ca109cb2c..7414f2502 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,6 +11,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### `Fixed`
+- [#697](https://github.com/nf-core/ampliseq/pull/697) - Template update for nf-core/tools version 2.12
+
### `Dependencies`
### `Removed`
diff --git a/README.md b/README.md
index 2e04e6cc8..ce07b1cb9 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,9 @@
-# ![nf-core/ampliseq](docs/images/nf-core-ampliseq_logo_light.png#gh-light-mode-only) ![nf-core/ampliseq](docs/images/nf-core-ampliseq_logo_dark.png#gh-dark-mode-only)
-
+
+
+
[![GitHub Actions CI Status](https://github.com/nf-core/ampliseq/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/ampliseq/actions?query=workflow%3A%22nf-core+CI%22)
[![GitHub Actions Linting Status](https://github.com/nf-core/ampliseq/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/ampliseq/actions?query=workflow%3A%22nf-core+linting%22)
[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/ampliseq/results)
diff --git a/assets/email_template.html b/assets/email_template.html
index 505409d41..7fe19bb2f 100644
--- a/assets/email_template.html
+++ b/assets/email_template.html
@@ -12,7 +12,7 @@
-nf-core/ampliseq v${version}
+nf-core/ampliseq ${version}
Run Name: $runName
<% if (!success){
diff --git a/assets/email_template.txt b/assets/email_template.txt
index 8c0dac892..de5fa33b6 100644
--- a/assets/email_template.txt
+++ b/assets/email_template.txt
@@ -4,7 +4,7 @@
|\\ | |__ __ / ` / \\ |__) |__ } {
| \\| | \\__, \\__/ | \\ |___ \\`-._,-`-,
`._,._,'
- nf-core/ampliseq v${version}
+ nf-core/ampliseq ${version}
----------------------------------------------------
Run Name: $runName
diff --git a/assets/nf-core-ampliseq_logo_light.png b/assets/nf-core-ampliseq_logo_light.png
index 58f01531e..0b9000e43 100644
Binary files a/assets/nf-core-ampliseq_logo_light.png and b/assets/nf-core-ampliseq_logo_light.png differ
diff --git a/assets/report_template.Rmd b/assets/report_template.Rmd
index 752a6b17a..07235f738 100644
--- a/assets/report_template.Rmd
+++ b/assets/report_template.Rmd
@@ -134,7 +134,7 @@ htmltools::includeCSS(params$css)
cat(paste0("
"))
@@ -628,8 +628,8 @@ n_mito <- sum(grepl("mito", barrnap_sum$result))
n_euk <- sum(grepl("euk", barrnap_sum$result))
barrnap_df_sum <- data.frame(label=c('Bacteria','Archaea','Mitochondria','Eukaryotes','Unclassified'),
- count=c(n_bac,n_arc,n_mito,n_euk,n_asv_barrnap - n_classified),
- percent=c(round( (n_bac/n_asv_barrnap)*100, 2), round( (n_arc/n_asv_barrnap)*100, 2), round( (n_mito/n_asv_barrnap)*100, 2), round( (n_euk/n_asv_barrnap)*100, 2), round( ( (n_asv_barrnap - n_classified) /n_asv_barrnap)*100, 2) ) )
+ count=c(n_bac,n_arc,n_mito,n_euk,n_asv_barrnap - n_classified),
+ percent=c(round( (n_bac/n_asv_barrnap)*100, 2), round( (n_arc/n_asv_barrnap)*100, 2), round( (n_mito/n_asv_barrnap)*100, 2), round( (n_euk/n_asv_barrnap)*100, 2), round( ( (n_asv_barrnap - n_classified) /n_asv_barrnap)*100, 2) ) )
# Build outputtext
cat( "Barrnap classified ")
@@ -664,7 +664,7 @@ cat("\n\nrRNA classification results can be found in folder [barrnap](../barrnap
filter_ssu_asv <- read.table( params$filter_ssu_asv, header = FALSE, sep = "\t", stringsAsFactors = FALSE)
filter_ssu_asv_filtered <- nrow(filter_ssu_asv)/2
- # "n_asv_barrnap" is taken from the barrnap block above
+# "n_asv_barrnap" is taken from the barrnap block above
cat(paste0("
ASVs were filtered for `",params$filter_ssu,"` (`bac`: Bacteria, `arc`: Archaea, `mito`: Mitochondria, `euk`: Eukaryotes) using the above classification.
The number of ASVs was reduced by ",n_asv_barrnap-filter_ssu_asv_filtered,
@@ -941,8 +941,8 @@ asv_classi_df <- data.frame(level, n_asv_classified, p_asv_classified)
# Build output string
outputstr <- "DADA2 classified "
for (row in seq_len(nrow(asv_classi_df))) {
- outputstr <- paste0(outputstr, asv_classi_df[row, ]$p_asv_classified,
- " % ASVs at ", asv_classi_df[row, ]$level, " level, ")
+ outputstr <- paste0(outputstr, asv_classi_df[row, ]$p_asv_classified,
+ " % ASVs at ", asv_classi_df[row, ]$level, " level, ")
}
outputstr <- substr(outputstr, 1, nchar(outputstr)-2)
outputstr <- paste0(outputstr, ".\n\n")
@@ -982,7 +982,7 @@ if ( !isFALSE(params$qiime2_ref_tax_title) ) {
More details about the reference taxonomy database can be found in the ['Methods section'](#methods).\n\n", sep = "")
} else {
cat("The taxonomic classification was performed by [QIIME2](https://www.nature.com/articles/s41587-019-0209-9) using a custom database ",
- "provided by the user.\n\n", sep = "")
+ "provided by the user.\n\n", sep = "")
}
# Read file and prepare table
@@ -1018,8 +1018,8 @@ asv_classi_df <- data.frame(level, n_asv_classified, p_asv_classified)
# Build output string
outputstr <- "QIIME2 classified "
for (row in seq_len(nrow(asv_classi_df))) {
- outputstr <- paste0(outputstr, asv_classi_df[row, ]$p_asv_classified,
- " % ASVs at ", asv_classi_df[row, ]$level, " level, ")
+ outputstr <- paste0(outputstr, asv_classi_df[row, ]$p_asv_classified,
+ " % ASVs at ", asv_classi_df[row, ]$level, " level, ")
}
outputstr <- substr(outputstr, 1, nchar(outputstr)-2)
outputstr <- paste0(outputstr, ".\n\n")
@@ -1075,8 +1075,8 @@ asv_classi_df <- data.frame(level, n_asv_classified, p_asv_classified)
# Build output string
outputstr <- "SINTAX classified "
for (row in seq_len(nrow(asv_classi_df))) {
- outputstr <- paste0(outputstr, asv_classi_df[row, ]$p_asv_classified,
- " % ASVs at ", asv_classi_df[row, ]$level, " level, ")
+ outputstr <- paste0(outputstr, asv_classi_df[row, ]$p_asv_classified,
+ " % ASVs at ", asv_classi_df[row, ]$level, " level, ")
}
outputstr <- substr(outputstr, 1, nchar(outputstr)-2)
outputstr <- paste0(outputstr, ".\n\n")
@@ -1140,8 +1140,8 @@ asv_classi_df <- data.frame(level, n_asv_classified, p_asv_classified)
# Build output string
outputstr <- "Kraken2 classified "
for (row in seq_len(nrow(asv_classi_df))) {
- outputstr <- paste0(outputstr, asv_classi_df[row, ]$p_asv_classified,
- " % ASVs at ", asv_classi_df[row, ]$level, " level, ")
+ outputstr <- paste0(outputstr, asv_classi_df[row, ]$p_asv_classified,
+ " % ASVs at ", asv_classi_df[row, ]$level, " level, ")
}
outputstr <- substr(outputstr, 1, nchar(outputstr)-2)
outputstr <- paste0(outputstr, ".\n\n")
@@ -1202,8 +1202,8 @@ asv_classi_df <- data.frame(level, n_asv_classified, p_asv_classified)
# Build output string
outputstr <- "Phylogenetic Placement classified "
for (row in seq_len(nrow(asv_classi_df))) {
- outputstr <- paste0(outputstr, asv_classi_df[row, ]$p_asv_classified,
- " % ASVs at taxonomic level ", asv_classi_df[row, ]$level, ", ")
+ outputstr <- paste0(outputstr, asv_classi_df[row, ]$p_asv_classified,
+ " % ASVs at taxonomic level ", asv_classi_df[row, ]$level, ", ")
}
outputstr <- substr(outputstr, 1, nchar(outputstr)-2)
outputstr <- paste0(outputstr, ".\n\n")
diff --git a/docs/images/nf-core-ampliseq_logo_dark.png b/docs/images/nf-core-ampliseq_logo_dark.png
index 68c86a6a1..9de168569 100644
Binary files a/docs/images/nf-core-ampliseq_logo_dark.png and b/docs/images/nf-core-ampliseq_logo_dark.png differ
diff --git a/docs/images/nf-core-ampliseq_logo_light.png b/docs/images/nf-core-ampliseq_logo_light.png
index 58f01531e..a59dfc48c 100644
Binary files a/docs/images/nf-core-ampliseq_logo_light.png and b/docs/images/nf-core-ampliseq_logo_light.png differ
diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy
index 4b7ec2afc..5d1f9c083 100755
--- a/lib/WorkflowMain.groovy
+++ b/lib/WorkflowMain.groovy
@@ -25,7 +25,7 @@ class WorkflowMain {
//
// Validate parameters and print summary to screen
//
- public static void initialise(workflow, params, log) {
+ public static void initialise(workflow, params, log, args) {
// Check that keys for reference databases are valid
if (params.dada_ref_taxonomy && !params.skip_taxonomy && !params.skip_dada_taxonomy) {
@@ -47,6 +47,8 @@ class WorkflowMain {
// Check that a -profile or Nextflow config has been provided to run the pipeline
NfcoreTemplate.checkConfigProvided(workflow, log)
+ // Check that the profile doesn't contain spaces and doesn't end with a trailing comma
+ checkProfile(workflow.profile, args, log)
// Check that conda channels are set-up correctly
if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) {
@@ -96,4 +98,16 @@ class WorkflowMain {
Nextflow.error(error_string)
}
}
+
+ //
+ // Exit pipeline if --profile contains spaces
+ //
+ private static void checkProfile(profile, args, log) {
+ if (profile.endsWith(',')) {
+ Nextflow.error "Profile cannot end with a trailing comma. Please remove the comma from the end of the profile string.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead, e.g., `-profile docker,test`."
+ }
+ if (args[0]) {
+ log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${args[0]}` has been detected.\n Hint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead, e.g., `-profile docker,test`."
+ }
+ }
}
diff --git a/lib/nfcore_external_java_deps.jar b/lib/nfcore_external_java_deps.jar
deleted file mode 100644
index 805c8bb5e..000000000
Binary files a/lib/nfcore_external_java_deps.jar and /dev/null differ
diff --git a/main.nf b/main.nf
index 4c4846049..dd35beca5 100644
--- a/main.nf
+++ b/main.nf
@@ -33,7 +33,7 @@ if (params.validate_params) {
validateParameters()
}
-WorkflowMain.initialise(workflow, params, log)
+WorkflowMain.initialise(workflow, params, log, args)
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/modules.json b/modules.json
index 595c024f8..f94caaac4 100644
--- a/modules.json
+++ b/modules.json
@@ -7,7 +7,7 @@
"nf-core": {
"custom/dumpsoftwareversions": {
"branch": "master",
- "git_sha": "bba7e362e4afead70653f84d8700588ea28d0f9e",
+ "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93",
"installed_by": ["modules"]
},
"cutadapt": {
@@ -17,52 +17,52 @@
},
"epang/place": {
"branch": "master",
- "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
+ "git_sha": "3f5420aa22e00bd030a2556dfdffc9e164ec0ec5",
"installed_by": ["fasta_newick_epang_gappa"]
},
"epang/split": {
"branch": "master",
- "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
+ "git_sha": "3f5420aa22e00bd030a2556dfdffc9e164ec0ec5",
"installed_by": ["fasta_newick_epang_gappa"]
},
"fastqc": {
"branch": "master",
- "git_sha": "65ad3e0b9a4099592e1102e92e10455dc661cf53",
+ "git_sha": "c9488585ce7bd35ccd2a30faa2371454c8112fb9",
"installed_by": ["modules"]
},
"gappa/examineassign": {
"branch": "master",
- "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
+ "git_sha": "3f5420aa22e00bd030a2556dfdffc9e164ec0ec5",
"installed_by": ["fasta_newick_epang_gappa"]
},
"gappa/examinegraft": {
"branch": "master",
- "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
+ "git_sha": "3f5420aa22e00bd030a2556dfdffc9e164ec0ec5",
"installed_by": ["fasta_newick_epang_gappa"]
},
"gappa/examineheattree": {
"branch": "master",
- "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
+ "git_sha": "3f5420aa22e00bd030a2556dfdffc9e164ec0ec5",
"installed_by": ["fasta_newick_epang_gappa"]
},
"hmmer/eslalimask": {
"branch": "master",
- "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
+ "git_sha": "3f5420aa22e00bd030a2556dfdffc9e164ec0ec5",
"installed_by": ["fasta_newick_epang_gappa"]
},
"hmmer/eslreformat": {
"branch": "master",
- "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
+ "git_sha": "3f5420aa22e00bd030a2556dfdffc9e164ec0ec5",
"installed_by": ["fasta_newick_epang_gappa"]
},
"hmmer/hmmalign": {
"branch": "master",
- "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
+ "git_sha": "3f5420aa22e00bd030a2556dfdffc9e164ec0ec5",
"installed_by": ["fasta_newick_epang_gappa"]
},
"hmmer/hmmbuild": {
"branch": "master",
- "git_sha": "911696ea0b62df80e900ef244d7867d177971f73",
+ "git_sha": "3f5420aa22e00bd030a2556dfdffc9e164ec0ec5",
"installed_by": ["fasta_newick_epang_gappa"]
},
"kraken2/kraken2": {
@@ -78,7 +78,7 @@
},
"multiqc": {
"branch": "master",
- "git_sha": "4ab13872435962dadc239979554d13709e20bf29",
+ "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93",
"installed_by": ["modules"]
},
"pigz/uncompress": {
@@ -112,7 +112,7 @@
"nf-core": {
"fasta_newick_epang_gappa": {
"branch": "master",
- "git_sha": "dedc0e31087f3306101c38835d051bf49789445a",
+ "git_sha": "cfd937a668919d948f6fcbf4218e79de50c2f36f",
"installed_by": ["subworkflows"]
}
}
diff --git a/modules/nf-core/custom/dumpsoftwareversions/environment.yml b/modules/nf-core/custom/dumpsoftwareversions/environment.yml
index f0c63f698..9b3272bc1 100644
--- a/modules/nf-core/custom/dumpsoftwareversions/environment.yml
+++ b/modules/nf-core/custom/dumpsoftwareversions/environment.yml
@@ -4,4 +4,4 @@ channels:
- bioconda
- defaults
dependencies:
- - bioconda::multiqc=1.17
+ - bioconda::multiqc=1.19
diff --git a/modules/nf-core/custom/dumpsoftwareversions/main.nf b/modules/nf-core/custom/dumpsoftwareversions/main.nf
index 7685b33cd..f2187611c 100644
--- a/modules/nf-core/custom/dumpsoftwareversions/main.nf
+++ b/modules/nf-core/custom/dumpsoftwareversions/main.nf
@@ -4,8 +4,8 @@ process CUSTOM_DUMPSOFTWAREVERSIONS {
// Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container
conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/multiqc:1.17--pyhdfd78af_0' :
- 'biocontainers/multiqc:1.17--pyhdfd78af_0' }"
+ 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' :
+ 'biocontainers/multiqc:1.19--pyhdfd78af_0' }"
input:
path versions
diff --git a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test
index eec1db10a..b1e1630bb 100644
--- a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test
+++ b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test
@@ -31,7 +31,12 @@ nextflow_process {
then {
assertAll(
{ assert process.success },
- { assert snapshot(process.out).match() }
+ { assert snapshot(
+ process.out.versions,
+ file(process.out.mqc_yml[0]).readLines()[0..10],
+ file(process.out.yml[0]).readLines()[0..7]
+ ).match()
+ }
)
}
}
diff --git a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap
index 4274ed57a..5f59a936d 100644
--- a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap
+++ b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap
@@ -1,27 +1,33 @@
{
"Should run without failures": {
"content": [
- {
- "0": [
- "software_versions.yml:md5,1c851188476409cda5752ce971b20b58"
- ],
- "1": [
- "software_versions_mqc.yml:md5,2570f4ba271ad08357b0d3d32a9cf84d"
- ],
- "2": [
- "versions.yml:md5,3843ac526e762117eedf8825b40683df"
- ],
- "mqc_yml": [
- "software_versions_mqc.yml:md5,2570f4ba271ad08357b0d3d32a9cf84d"
- ],
- "versions": [
- "versions.yml:md5,3843ac526e762117eedf8825b40683df"
- ],
- "yml": [
- "software_versions.yml:md5,1c851188476409cda5752ce971b20b58"
- ]
- }
+ [
+ "versions.yml:md5,76d454d92244589d32455833f7c1ba6d"
+ ],
+ [
+ "data: \"\\n\\n \\n \\n Process Name | \\n \\",
+ " \\ Software | \\n Version | \\n
\\n \\n\\",
+ " \\n\\n\\n CUSTOM_DUMPSOFTWAREVERSIONS | \\n python | \\n\\",
+ " \\ 3.11.7 | \\n
\\n\\n\\n | \\n \\",
+ " \\ yaml | \\n 5.4.1 | \\n
\\n\\n\\n\\",
+ " \\n\\n TOOL1 | \\n tool1 | \\n\\",
+ " \\ 0.11.9 | \\n
\\n\\n\\n\\n\\n TOOL2 | \\n\\",
+ " \\ tool2 | \\n 1.9 | \\n
\\n\\n\\n\\",
+ " \\n\\n Workflow | \\n Nextflow | \\n\\"
+ ],
+ [
+ "CUSTOM_DUMPSOFTWAREVERSIONS:",
+ " python: 3.11.7",
+ " yaml: 5.4.1",
+ "TOOL1:",
+ " tool1: 0.11.9",
+ "TOOL2:",
+ " tool2: '1.9'",
+ "Workflow:"
+ ]
],
- "timestamp": "2023-11-03T14:43:22.157011"
+ "timestamp": "2024-01-09T23:01:18.710682"
}
-}
+}
\ No newline at end of file
diff --git a/modules/nf-core/epang/place/environment.yml b/modules/nf-core/epang/place/environment.yml
new file mode 100644
index 000000000..a8536b1d4
--- /dev/null
+++ b/modules/nf-core/epang/place/environment.yml
@@ -0,0 +1,7 @@
+name: epang_place
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::epa-ng=0.3.8
diff --git a/modules/nf-core/epang/place/main.nf b/modules/nf-core/epang/place/main.nf
index 799acaa19..217975ebe 100644
--- a/modules/nf-core/epang/place/main.nf
+++ b/modules/nf-core/epang/place/main.nf
@@ -2,7 +2,7 @@ process EPANG_PLACE {
tag "$meta.id"
label 'process_high'
- conda "bioconda::epa-ng=0.3.8"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/epa-ng:0.3.8--h9a82719_1':
'biocontainers/epa-ng:0.3.8--h9a82719_1' }"
diff --git a/modules/nf-core/epang/place/meta.yml b/modules/nf-core/epang/place/meta.yml
index 483f4bad7..7d31a3497 100644
--- a/modules/nf-core/epang/place/meta.yml
+++ b/modules/nf-core/epang/place/meta.yml
@@ -12,7 +12,6 @@ tools:
tool_dev_url: "https://github.com/Pbdas/epa-ng"
doi: "10.1093/sysbio/syy054"
licence: "['GNU Affero General Public License v3.0']"
-
input:
- meta:
type: map
@@ -39,7 +38,6 @@ input:
type: file
description: file argument to the --binary parameter
pattern: "*"
-
output:
- meta:
type: map
@@ -61,6 +59,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@erikrikarddaniel"
+maintainers:
+ - "@erikrikarddaniel"
diff --git a/modules/nf-core/epang/split/environment.yml b/modules/nf-core/epang/split/environment.yml
new file mode 100644
index 000000000..5dd7ff771
--- /dev/null
+++ b/modules/nf-core/epang/split/environment.yml
@@ -0,0 +1,7 @@
+name: epang_split
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::epa-ng=0.3.8
diff --git a/modules/nf-core/epang/split/main.nf b/modules/nf-core/epang/split/main.nf
index fe372541a..67b534911 100644
--- a/modules/nf-core/epang/split/main.nf
+++ b/modules/nf-core/epang/split/main.nf
@@ -2,7 +2,7 @@ process EPANG_SPLIT {
tag "$meta.id"
label 'process_single'
- conda "bioconda::epa-ng=0.3.8"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/epa-ng:0.3.8--h9a82719_1':
'biocontainers/epa-ng:0.3.8--h9a82719_1' }"
diff --git a/modules/nf-core/epang/split/meta.yml b/modules/nf-core/epang/split/meta.yml
index b5d915a46..5af4234f0 100644
--- a/modules/nf-core/epang/split/meta.yml
+++ b/modules/nf-core/epang/split/meta.yml
@@ -12,7 +12,6 @@ tools:
tool_dev_url: "https://github.com/Pbdas/epa-ng"
doi: "10.1093/sysbio/syy054"
licence: "['GNU Affero General Public License v3.0']"
-
input:
# Only when we have meta
- meta:
@@ -28,7 +27,6 @@ input:
type: file
description: full alignment in any supported format to split into reference and query alignments
pattern: "*.{faa,fna,fa,fasta,fa,phy,aln,alnfaa,alnfna,alnfa,mfa,faa.gz,fna.gz,fa.gz,fasta.gz,fa.gz,phy.gz,aln.gz,alnfaa.gz,alnfna.gz,alnfa.gz,mfa.gz}"
-
output:
- meta:
type: map
@@ -45,6 +43,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@erikrikarddaniel"
+maintainers:
+ - "@erikrikarddaniel"
diff --git a/modules/nf-core/fastqc/tests/main.nf.test b/modules/nf-core/fastqc/tests/main.nf.test
index b9e8f926e..1f21c6646 100644
--- a/modules/nf-core/fastqc/tests/main.nf.test
+++ b/modules/nf-core/fastqc/tests/main.nf.test
@@ -3,24 +3,20 @@ nextflow_process {
name "Test Process FASTQC"
script "../main.nf"
process "FASTQC"
+
tag "modules"
tag "modules_nfcore"
tag "fastqc"
- test("Single-Read") {
+ test("sarscov2 single-end [fastq]") {
when {
- params {
- outdir = "$outputDir"
- }
process {
"""
- input[0] = [
+ input[0] = Channel.of([
[ id: 'test', single_end:true ],
- [
- file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)
- ]
- ]
+ [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) ]
+ ])
"""
}
}
@@ -28,82 +24,189 @@ nextflow_process {
then {
assertAll (
{ assert process.success },
+
// NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it.
// looks like this:
// https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039
- { assert process.out.html.get(0).get(1) ==~ ".*/test_fastqc.html" },
- { assert path(process.out.html.get(0).get(1)).getText().contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
- { assert snapshot(process.out.versions).match("versions") },
- { assert process.out.zip.get(0).get(1) ==~ ".*/test_fastqc.zip" }
+
+ { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" },
+ { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" },
+ { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
+
+ { assert snapshot(process.out.versions).match("versions") }
+ )
+ }
+ }
+
+ test("sarscov2 paired-end [fastq]") {
+
+ when {
+ process {
+ """
+ input[0] = Channel.of([
+ [id: 'test', single_end: false], // meta map
+ [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true),
+ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true) ]
+ ])
+ """
+ }
+ }
+
+ then {
+ assertAll (
+ { assert process.success },
+
+ { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" },
+ { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" },
+ { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" },
+ { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" },
+ { assert path(process.out.html[0][1][0]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
+ { assert path(process.out.html[0][1][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
+
+ { assert snapshot(process.out.versions).match("versions") }
+ )
+ }
+ }
+
+ test("sarscov2 interleaved [fastq]") {
+
+ when {
+ process {
+ """
+ input[0] = Channel.of([
+ [id: 'test', single_end: false], // meta map
+ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_interleaved.fastq.gz', checkIfExists: true)
+ ])
+ """
+ }
+ }
+
+ then {
+ assertAll (
+ { assert process.success },
+
+ { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" },
+ { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" },
+ { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
+
+ { assert snapshot(process.out.versions).match("versions") }
)
}
}
-// TODO
-// //
-// // Test with paired-end data
-// //
-// workflow test_fastqc_paired_end {
-// input = [
-// [id: 'test', single_end: false], // meta map
-// [
-// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true),
-// file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true)
-// ]
-// ]
-
-// FASTQC ( input )
-// }
-
-// //
-// // Test with interleaved data
-// //
-// workflow test_fastqc_interleaved {
-// input = [
-// [id: 'test', single_end: false], // meta map
-// file(params.test_data['sarscov2']['illumina']['test_interleaved_fastq_gz'], checkIfExists: true)
-// ]
-
-// FASTQC ( input )
-// }
-
-// //
-// // Test with bam data
-// //
-// workflow test_fastqc_bam {
-// input = [
-// [id: 'test', single_end: false], // meta map
-// file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true)
-// ]
-
-// FASTQC ( input )
-// }
-
-// //
-// // Test with multiple samples
-// //
-// workflow test_fastqc_multiple {
-// input = [
-// [id: 'test', single_end: false], // meta map
-// [
-// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true),
-// file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true),
-// file(params.test_data['sarscov2']['illumina']['test2_1_fastq_gz'], checkIfExists: true),
-// file(params.test_data['sarscov2']['illumina']['test2_2_fastq_gz'], checkIfExists: true)
-// ]
-// ]
-
-// FASTQC ( input )
-// }
-
-// //
-// // Test with custom prefix
-// //
-// workflow test_fastqc_custom_prefix {
-// input = [
-// [ id:'mysample', single_end:true ], // meta map
-// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)
-// ]
-
-// FASTQC ( input )
-// }
+
+ test("sarscov2 paired-end [bam]") {
+
+ when {
+ process {
+ """
+ input[0] = Channel.of([
+ [id: 'test', single_end: false], // meta map
+ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true)
+ ])
+ """
+ }
+ }
+
+ then {
+ assertAll (
+ { assert process.success },
+
+ { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" },
+ { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" },
+ { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
+
+ { assert snapshot(process.out.versions).match("versions") }
+ )
+ }
+ }
+
+ test("sarscov2 multiple [fastq]") {
+
+ when {
+ process {
+ """
+ input[0] = Channel.of([
+ [id: 'test', single_end: false], // meta map
+ [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true),
+ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true),
+ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_1.fastq.gz', checkIfExists: true),
+ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_2.fastq.gz', checkIfExists: true) ]
+ ])
+ """
+ }
+ }
+
+ then {
+ assertAll (
+ { assert process.success },
+
+ { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" },
+ { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" },
+ { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" },
+ { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" },
+ { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" },
+ { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" },
+ { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" },
+ { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" },
+ { assert path(process.out.html[0][1][0]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
+ { assert path(process.out.html[0][1][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
+ { assert path(process.out.html[0][1][2]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
+ { assert path(process.out.html[0][1][3]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
+
+ { assert snapshot(process.out.versions).match("versions") }
+ )
+ }
+ }
+
+ test("sarscov2 custom_prefix") {
+
+ when {
+ process {
+ """
+ input[0] = Channel.of([
+ [ id:'mysample', single_end:true ], // meta map
+ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true)
+ ])
+ """
+ }
+ }
+
+ then {
+ assertAll (
+ { assert process.success },
+
+ { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" },
+ { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" },
+ { assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
+
+ { assert snapshot(process.out.versions).match("versions") }
+ )
+ }
+ }
+
+ test("sarscov2 single-end [fastq] - stub") {
+
+ options "-stub"
+
+ when {
+ process {
+ """
+ input[0] = Channel.of([
+ [ id: 'test', single_end:true ],
+ [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) ]
+ ])
+ """
+ }
+ }
+
+ then {
+ assertAll (
+ { assert process.success },
+ { assert snapshot(process.out.html.collect { file(it[1]).getName() } +
+ process.out.zip.collect { file(it[1]).getName() } +
+ process.out.versions ).match() }
+ )
+ }
+ }
+
}
diff --git a/modules/nf-core/fastqc/tests/main.nf.test.snap b/modules/nf-core/fastqc/tests/main.nf.test.snap
index 636a32cea..5d624bb82 100644
--- a/modules/nf-core/fastqc/tests/main.nf.test.snap
+++ b/modules/nf-core/fastqc/tests/main.nf.test.snap
@@ -1,10 +1,20 @@
{
+ "sarscov2 single-end [fastq] - stub": {
+ "content": [
+ [
+ "test.html",
+ "test.zip",
+ "versions.yml:md5,e1cc25ca8af856014824abd842e93978"
+ ]
+ ],
+ "timestamp": "2024-01-17T18:40:57.254299"
+ },
"versions": {
"content": [
[
"versions.yml:md5,e1cc25ca8af856014824abd842e93978"
]
],
- "timestamp": "2023-10-09T23:40:54+0000"
+ "timestamp": "2024-01-17T18:36:50.033627"
}
}
\ No newline at end of file
diff --git a/modules/nf-core/gappa/examineassign/environment.yml b/modules/nf-core/gappa/examineassign/environment.yml
new file mode 100644
index 000000000..4930e7245
--- /dev/null
+++ b/modules/nf-core/gappa/examineassign/environment.yml
@@ -0,0 +1,7 @@
+name: gappa_examineassign
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gappa=0.8.0
diff --git a/modules/nf-core/gappa/examineassign/main.nf b/modules/nf-core/gappa/examineassign/main.nf
index 6d920b83d..940a61077 100644
--- a/modules/nf-core/gappa/examineassign/main.nf
+++ b/modules/nf-core/gappa/examineassign/main.nf
@@ -2,7 +2,7 @@ process GAPPA_EXAMINEASSIGN {
tag "$meta.id"
label 'process_medium'
- conda "bioconda::gappa=0.8.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gappa:0.8.0--h9a82719_0':
'biocontainers/gappa:0.8.0--h9a82719_0' }"
diff --git a/modules/nf-core/gappa/examineassign/meta.yml b/modules/nf-core/gappa/examineassign/meta.yml
index 501c272b3..1753b1811 100644
--- a/modules/nf-core/gappa/examineassign/meta.yml
+++ b/modules/nf-core/gappa/examineassign/meta.yml
@@ -13,7 +13,6 @@ tools:
tool_dev_url: "https://github.com/lczech/gappa"
doi: "10.1093/bioinformatics/btaa070"
licence: "['GPL v3']"
-
input:
- meta:
type: map
@@ -24,7 +23,6 @@ input:
type: file
description: jplace file output from phylogenetic placement, e.g. EPA-NG, gzipped or not
pattern: "*.{jplace,jplace.gz}"
-
output:
- meta:
type: map
@@ -50,6 +48,7 @@ output:
type: file
description: File containing software versions
pattern: "versions.yml"
-
authors:
- "@erikrikarddaniel"
+maintainers:
+ - "@erikrikarddaniel"
diff --git a/modules/nf-core/gappa/examinegraft/environment.yml b/modules/nf-core/gappa/examinegraft/environment.yml
new file mode 100644
index 000000000..c22460d20
--- /dev/null
+++ b/modules/nf-core/gappa/examinegraft/environment.yml
@@ -0,0 +1,7 @@
+name: gappa_examinegraft
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gappa=0.8.0
diff --git a/modules/nf-core/gappa/examinegraft/main.nf b/modules/nf-core/gappa/examinegraft/main.nf
index f10aaa317..3efed466c 100644
--- a/modules/nf-core/gappa/examinegraft/main.nf
+++ b/modules/nf-core/gappa/examinegraft/main.nf
@@ -2,7 +2,7 @@ process GAPPA_EXAMINEGRAFT {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gappa=0.8.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gappa:0.8.0--h9a82719_0':
'biocontainers/gappa:0.8.0--h9a82719_0' }"
diff --git a/modules/nf-core/gappa/examinegraft/meta.yml b/modules/nf-core/gappa/examinegraft/meta.yml
index 0e813c85f..9dcb56bcc 100644
--- a/modules/nf-core/gappa/examinegraft/meta.yml
+++ b/modules/nf-core/gappa/examinegraft/meta.yml
@@ -10,7 +10,6 @@ tools:
tool_dev_url: "https://github.com/lczech/gappa"
doi: "10.1093/bioinformatics/btaa070"
licence: "['GPL v3']"
-
input:
- meta:
type: map
@@ -21,7 +20,6 @@ input:
type: file
description: jplace file output from phylogenetic placement, e.g. EPA-NG, gzipped or not
pattern: "*.{jplace,jplace.gz}"
-
output:
- meta:
type: map
@@ -36,6 +34,7 @@ output:
type: file
description: phylogenetic tree file in newick format
pattern: "*.newick"
-
authors:
- "@erikrikarddaniel"
+maintainers:
+ - "@erikrikarddaniel"
diff --git a/modules/nf-core/gappa/examineheattree/environment.yml b/modules/nf-core/gappa/examineheattree/environment.yml
new file mode 100644
index 000000000..a5ee8d48f
--- /dev/null
+++ b/modules/nf-core/gappa/examineheattree/environment.yml
@@ -0,0 +1,7 @@
+name: gappa_examineheattree
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::gappa=0.8.0
diff --git a/modules/nf-core/gappa/examineheattree/main.nf b/modules/nf-core/gappa/examineheattree/main.nf
index 5a6105c01..27368f010 100644
--- a/modules/nf-core/gappa/examineheattree/main.nf
+++ b/modules/nf-core/gappa/examineheattree/main.nf
@@ -2,7 +2,7 @@ process GAPPA_EXAMINEHEATTREE {
tag "$meta.id"
label 'process_low'
- conda "bioconda::gappa=0.8.0"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gappa:0.8.0--h9a82719_0':
'biocontainers/gappa:0.8.0--h9a82719_0' }"
diff --git a/modules/nf-core/gappa/examineheattree/meta.yml b/modules/nf-core/gappa/examineheattree/meta.yml
index badf97193..35e0c1e4f 100644
--- a/modules/nf-core/gappa/examineheattree/meta.yml
+++ b/modules/nf-core/gappa/examineheattree/meta.yml
@@ -13,7 +13,6 @@ tools:
tool_dev_url: "https://github.com/lczech/gappa"
doi: "10.1093/bioinformatics/btaa070"
licence: "['GPL v3']"
-
input:
- meta:
type: map
@@ -24,7 +23,6 @@ input:
type: file
description: jplace file output from phylogenetic placement, e.g. EPA-NG, gzipped or not
pattern: "*.{jplace,jplace.gz}"
-
output:
- meta:
type: map
@@ -59,6 +57,7 @@ output:
type: file
description: log file from the run
pattern: "*.log"
-
authors:
- "@erikrikarddaniel"
+maintainers:
+ - "@erikrikarddaniel"
diff --git a/modules/nf-core/hmmer/eslalimask/environment.yml b/modules/nf-core/hmmer/eslalimask/environment.yml
new file mode 100644
index 000000000..ed14ff863
--- /dev/null
+++ b/modules/nf-core/hmmer/eslalimask/environment.yml
@@ -0,0 +1,7 @@
+name: hmmer_eslalimask
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::hmmer=3.3.2
diff --git a/modules/nf-core/hmmer/eslalimask/main.nf b/modules/nf-core/hmmer/eslalimask/main.nf
index 83ceae4c7..e02904644 100644
--- a/modules/nf-core/hmmer/eslalimask/main.nf
+++ b/modules/nf-core/hmmer/eslalimask/main.nf
@@ -2,7 +2,7 @@ process HMMER_ESLALIMASK {
tag "$meta.id"
label 'process_single'
- conda "bioconda::hmmer=3.3.2"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/hmmer:3.3.2--h1b792b2_1':
'biocontainers/hmmer:3.3.2--h1b792b2_1' }"
diff --git a/modules/nf-core/hmmer/eslalimask/meta.yml b/modules/nf-core/hmmer/eslalimask/meta.yml
index 238c7393b..c4a9a4ccf 100644
--- a/modules/nf-core/hmmer/eslalimask/meta.yml
+++ b/modules/nf-core/hmmer/eslalimask/meta.yml
@@ -9,97 +9,79 @@ tools:
description: "Biosequence analysis using profile hidden Markov models"
homepage: http://hmmer.org/
documentation: http://hmmer.org/documentation.html
-
doi: "10.1371/journal.pcbi.1002195"
licence: ["BSD-3-Clause"]
-
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test' ]
-
- unmaskedaln:
type: file
description: multiple sequence alignment, Stockholm or other formats
pattern: "*"
-
- fmask_rf:
type: val
description: Flag to output optional file with final mask of non-gap RF len
-
- fmask_all:
type: val
description: Flag to output optional file with final mask of full aln len
-
- gmask_rf:
type: val
description: Flag to output optional file gap-based 0/1 mask of non-gap RF len
-
- gmask_all:
type: val
description: Flag to output optional file gap-based 0/1 mask of full aln len
-
- pmask_rf:
type: val
description: Flag to output optional file with PP-based 0/1 mask of non-gap RF len
-
- pmask_all:
type: val
description: Flag to output optional file with PP-based 0/1 mask of full aln len
-
- maskfile:
type: file
description: mask file, see program documentation
pattern: "*"
-
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
-
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
-
- maskedaln:
type: file
description: Masked alignment in gzipped Stockholm format
pattern: "*.sthlm.gz"
-
- fmask_rf:
type: file
description: File with final mask of non-gap RF len
pattern: "*.fmask-rf.gz"
-
- fmask_all:
type: file
description: File with final mask of full aln len
pattern: "*.fmask-all.gz"
-
- gmask_rf:
type: file
description: File with gap-based 0/1 mask of non-gap RF len
pattern: "*.gmask-rf.gz"
-
- gmask_all:
type: file
description: File with gap-based 0/1 mask of full aln len
pattern: "*.gmask-all.gz"
-
- pmask_rf:
type: file
description: File with PP-based 0/1 mask of non-gap RF len
pattern: "*.pmask-rf.gz"
-
- pmask_all:
type: file
description: File with PP-based 0/1 mask of full aln len
pattern: "*.pmask-all.gz"
-
authors:
- "@erikrikarddaniel"
+maintainers:
+ - "@erikrikarddaniel"
diff --git a/modules/nf-core/hmmer/eslreformat/environment.yml b/modules/nf-core/hmmer/eslreformat/environment.yml
new file mode 100644
index 000000000..a847b7d31
--- /dev/null
+++ b/modules/nf-core/hmmer/eslreformat/environment.yml
@@ -0,0 +1,7 @@
+name: hmmer_eslreformat
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::hmmer=3.3.2
diff --git a/modules/nf-core/hmmer/eslreformat/main.nf b/modules/nf-core/hmmer/eslreformat/main.nf
index fd675b79e..ffac91172 100644
--- a/modules/nf-core/hmmer/eslreformat/main.nf
+++ b/modules/nf-core/hmmer/eslreformat/main.nf
@@ -2,7 +2,7 @@ process HMMER_ESLREFORMAT {
tag "$meta.id"
label 'process_single'
- conda "bioconda::hmmer=3.3.2"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/hmmer:3.3.2--h1b792b2_1':
'biocontainers/hmmer:3.3.2--h1b792b2_1' }"
diff --git a/modules/nf-core/hmmer/eslreformat/meta.yml b/modules/nf-core/hmmer/eslreformat/meta.yml
index 05ff04a9c..7e530d59a 100644
--- a/modules/nf-core/hmmer/eslreformat/meta.yml
+++ b/modules/nf-core/hmmer/eslreformat/meta.yml
@@ -7,10 +7,8 @@ tools:
description: "Biosequence analysis using profile hidden Markov models"
homepage: http://hmmer.org/
documentation: http://hmmer.org/documentation.html
-
doi: "10.1371/journal.pcbi.1002195"
licence: ["BSD-3-Clause"]
-
input:
- meta:
type: map
@@ -21,7 +19,6 @@ input:
type: file
description: Sequences, aligned or not, in any supported format
pattern: "*"
-
output:
- meta:
type: map
@@ -36,6 +33,7 @@ output:
type: file
description: Reformated sequence file
pattern: "*.*.gz"
-
authors:
- "@erikrikarddaniel"
+maintainers:
+ - "@erikrikarddaniel"
diff --git a/modules/nf-core/hmmer/hmmalign/environment.yml b/modules/nf-core/hmmer/hmmalign/environment.yml
new file mode 100644
index 000000000..9fa6bdafc
--- /dev/null
+++ b/modules/nf-core/hmmer/hmmalign/environment.yml
@@ -0,0 +1,7 @@
+name: hmmer_hmmalign
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::hmmer=3.3.2
diff --git a/modules/nf-core/hmmer/hmmalign/main.nf b/modules/nf-core/hmmer/hmmalign/main.nf
index 74e7ee026..00ae8da2b 100644
--- a/modules/nf-core/hmmer/hmmalign/main.nf
+++ b/modules/nf-core/hmmer/hmmalign/main.nf
@@ -2,7 +2,7 @@ process HMMER_HMMALIGN {
tag "$meta.id"
label 'process_single'
- conda "bioconda::hmmer=3.3.2"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/hmmer:3.3.2--h1b792b2_1' :
'biocontainers/hmmer:3.3.2--h1b792b2_1' }"
diff --git a/modules/nf-core/hmmer/hmmalign/meta.yml b/modules/nf-core/hmmer/hmmalign/meta.yml
index df1bd37e7..c1ac8f408 100644
--- a/modules/nf-core/hmmer/hmmalign/meta.yml
+++ b/modules/nf-core/hmmer/hmmalign/meta.yml
@@ -7,10 +7,8 @@ tools:
description: Biosequence analysis using profile hidden Markov models
homepage: http://hmmer.org/
documentation: http://hmmer.org/documentation.html
-
doi: "10.1371/journal.pcbi.1002195"
licence: ["BSD-3-Clause"]
-
input:
- meta:
type: map
@@ -25,7 +23,6 @@ input:
type: file
description: A gzipped HMM file
pattern: "*.hmm.gz"
-
output:
- meta:
type: map
@@ -40,7 +37,9 @@ output:
type: file
description: Multiple alignment in gzipped Stockholm format
pattern: "*.sthlm.gz"
-
authors:
- "@erikrikarddaniel"
- "@jfy133"
+maintainers:
+ - "@erikrikarddaniel"
+ - "@jfy133"
diff --git a/modules/nf-core/hmmer/hmmbuild/environment.yml b/modules/nf-core/hmmer/hmmbuild/environment.yml
new file mode 100644
index 000000000..1957ad5c6
--- /dev/null
+++ b/modules/nf-core/hmmer/hmmbuild/environment.yml
@@ -0,0 +1,7 @@
+name: hmmer_hmmbuild
+channels:
+ - conda-forge
+ - bioconda
+ - defaults
+dependencies:
+ - bioconda::hmmer=3.3.2
diff --git a/modules/nf-core/hmmer/hmmbuild/main.nf b/modules/nf-core/hmmer/hmmbuild/main.nf
index 1cbe6c952..3e3022fe6 100644
--- a/modules/nf-core/hmmer/hmmbuild/main.nf
+++ b/modules/nf-core/hmmer/hmmbuild/main.nf
@@ -2,7 +2,7 @@ process HMMER_HMMBUILD {
tag "$meta.id"
label 'process_low'
- conda "bioconda::hmmer=3.3.2"
+ conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/hmmer:3.3.2--h87f3376_2':
'biocontainers/hmmer:3.3.2--h1b792b2_1' }"
diff --git a/modules/nf-core/hmmer/hmmbuild/meta.yml b/modules/nf-core/hmmer/hmmbuild/meta.yml
index 77af69cff..4bf6b1cfe 100644
--- a/modules/nf-core/hmmer/hmmbuild/meta.yml
+++ b/modules/nf-core/hmmer/hmmbuild/meta.yml
@@ -14,7 +14,6 @@ tools:
tool_dev_url: "https://github.com/EddyRivasLab/hmmer"
doi: "10.1371/journal.pcbi.1002195"
licence: "['BSD']"
-
input:
- meta:
type: map
@@ -29,7 +28,6 @@ input:
type: file
description: read substitution score matrix, for use when building profiles from single sequences (--singlemx option)
pattern: "*"
-
output:
- versions:
type: file
@@ -39,6 +37,7 @@ output:
type: file
description: Gzipped HMM file
pattern: "*.{hmm.gz}"
-
authors:
- "@erikrikarddaniel"
+maintainers:
+ - "@erikrikarddaniel"
diff --git a/modules/nf-core/multiqc/environment.yml b/modules/nf-core/multiqc/environment.yml
index bc0bdb5b6..7625b7520 100644
--- a/modules/nf-core/multiqc/environment.yml
+++ b/modules/nf-core/multiqc/environment.yml
@@ -4,4 +4,4 @@ channels:
- bioconda
- defaults
dependencies:
- - bioconda::multiqc=1.18
+ - bioconda::multiqc=1.19
diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf
index 00cc48d27..1b9f7c431 100644
--- a/modules/nf-core/multiqc/main.nf
+++ b/modules/nf-core/multiqc/main.nf
@@ -3,8 +3,8 @@ process MULTIQC {
conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/multiqc:1.18--pyhdfd78af_0' :
- 'biocontainers/multiqc:1.18--pyhdfd78af_0' }"
+ 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' :
+ 'biocontainers/multiqc:1.19--pyhdfd78af_0' }"
input:
path multiqc_files, stageAs: "?/*"
@@ -43,7 +43,7 @@ process MULTIQC {
stub:
"""
- touch multiqc_data
+ mkdir multiqc_data
touch multiqc_plots
touch multiqc_report.html
diff --git a/modules/nf-core/multiqc/meta.yml b/modules/nf-core/multiqc/meta.yml
index f1aa660eb..45a9bc35e 100644
--- a/modules/nf-core/multiqc/meta.yml
+++ b/modules/nf-core/multiqc/meta.yml
@@ -1,4 +1,3 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json
name: multiqc
description: Aggregate results from bioinformatics analyses across many samples into a single report
keywords:
diff --git a/modules/nf-core/multiqc/tests/main.nf.test b/modules/nf-core/multiqc/tests/main.nf.test
index c2dad217c..d0438eda6 100644
--- a/modules/nf-core/multiqc/tests/main.nf.test
+++ b/modules/nf-core/multiqc/tests/main.nf.test
@@ -7,12 +7,9 @@ nextflow_process {
tag "modules_nfcore"
tag "multiqc"
- test("MULTIQC: FASTQC") {
+ test("sarscov2 single-end [fastqc]") {
when {
- params {
- outdir = "$outputDir"
- }
process {
"""
input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)])
@@ -26,20 +23,17 @@ nextflow_process {
then {
assertAll(
{ assert process.success },
- { assert path(process.out.report.get(0)).exists() },
- { assert path(process.out.data.get(0)).exists() },
- { assert path(process.out.versions.get(0)).getText().contains("multiqc") }
+ { assert process.out.report[0] ==~ ".*/multiqc_report.html" },
+ { assert process.out.data[0] ==~ ".*/multiqc_data" },
+ { assert snapshot(process.out.versions).match("versions") }
)
}
}
- test("MULTIQC: FASTQC and a config file") {
+ test("sarscov2 single-end [fastqc] [config]") {
when {
- params {
- outdir = "$outputDir"
- }
process {
"""
input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)])
@@ -53,9 +47,35 @@ nextflow_process {
then {
assertAll(
{ assert process.success },
- { assert path(process.out.report.get(0)).exists() },
- { assert path(process.out.data.get(0)).exists() },
- { assert path(process.out.versions.get(0)).getText().contains("multiqc") }
+ { assert process.out.report[0] ==~ ".*/multiqc_report.html" },
+ { assert process.out.data[0] ==~ ".*/multiqc_data" },
+ { assert snapshot(process.out.versions).match("versions") }
+ )
+ }
+ }
+
+ test("sarscov2 single-end [fastqc] - stub") {
+
+ options "-stub"
+
+ when {
+ process {
+ """
+ input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)])
+ input[1] = []
+ input[2] = []
+ input[3] = []
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert process.success },
+ { assert snapshot(process.out.report.collect { file(it).getName() } +
+ process.out.data.collect { file(it).getName() } +
+ process.out.plots.collect { file(it).getName() } +
+ process.out.versions ).match() }
)
}
diff --git a/modules/nf-core/multiqc/tests/main.nf.test.snap b/modules/nf-core/multiqc/tests/main.nf.test.snap
new file mode 100644
index 000000000..d37e73040
--- /dev/null
+++ b/modules/nf-core/multiqc/tests/main.nf.test.snap
@@ -0,0 +1,21 @@
+{
+ "versions": {
+ "content": [
+ [
+ "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d"
+ ]
+ ],
+ "timestamp": "2024-01-09T23:02:49.911994"
+ },
+ "sarscov2 single-end [fastqc] - stub": {
+ "content": [
+ [
+ "multiqc_report.html",
+ "multiqc_data",
+ "multiqc_plots",
+ "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d"
+ ]
+ ],
+ "timestamp": "2024-01-09T23:03:14.524346"
+ }
+}
\ No newline at end of file
diff --git a/nextflow.config b/nextflow.config
index 8e3566118..b65af8c81 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -170,7 +170,7 @@ try {
}
// Load nf-core/ampliseq custom profiles from different institutions.
-// Warning: Uncomment only if a pipeline-specific instititutional config already exists on nf-core/configs!
+// Warning: Uncomment only if a pipeline-specific institutional config already exists on nf-core/configs!
// try {
// includeConfig "${params.custom_config_base}/pipeline/ampliseq.config"
// } catch (Exception e) {
@@ -190,6 +190,7 @@ profiles {
podman.enabled = false
shifter.enabled = false
charliecloud.enabled = false
+ channels = ['conda-forge', 'bioconda', 'defaults']
apptainer.enabled = false
}
mamba {
diff --git a/pyproject.toml b/pyproject.toml
index 0d62beb6f..7d08e1c8e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,10 +1,13 @@
-# Config file for Python. Mostly used to configure linting of bin/check_samplesheet.py with Black.
+# Config file for Python. Mostly used to configure linting of bin/*.py with Ruff.
# Should be kept the same as nf-core/tools to avoid fighting with template synchronisation.
-[tool.black]
+[tool.ruff]
line-length = 120
-target_version = ["py37", "py38", "py39", "py310"]
+target-version = "py38"
+select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"]
+cache-dir = "~/.cache/ruff"
-[tool.isort]
-profile = "black"
-known_first_party = ["nf_core"]
-multi_line_output = 3
+[tool.ruff.isort]
+known-first-party = ["nf_core"]
+
+[tool.ruff.per-file-ignores]
+"__init__.py" = ["E402", "F401"]
diff --git a/subworkflows/local/qiime2_barplotavg.nf b/subworkflows/local/qiime2_barplotavg.nf
index 8814a8ea2..a052027cf 100644
--- a/subworkflows/local/qiime2_barplotavg.nf
+++ b/subworkflows/local/qiime2_barplotavg.nf
@@ -20,7 +20,7 @@ workflow QIIME2_BARPLOTAVG {
QIIME2_INASV_BPAVG ( ch_rel_tsv )
//group by metadata category (ch_metadata_category_barplot)
- QIIME2_FEATURETABLE_GROUP (
+ QIIME2_FEATURETABLE_GROUP (
QIIME2_INASV_BPAVG.out.qza
.combine(ch_metadata)
.combine(ch_metadata_category_barplot)
diff --git a/subworkflows/nf-core/fasta_newick_epang_gappa/meta.yml b/subworkflows/nf-core/fasta_newick_epang_gappa/meta.yml
index 60002c82c..e458915e8 100644
--- a/subworkflows/nf-core/fasta_newick_epang_gappa/meta.yml
+++ b/subworkflows/nf-core/fasta_newick_epang_gappa/meta.yml
@@ -96,3 +96,5 @@ output:
pattern: "versions.yml"
authors:
- "@erikrikarddaniel"
+maintainers:
+ - "@erikrikarddaniel"
diff --git a/tests/pipeline/doubleprimers.nf.test.snap b/tests/pipeline/doubleprimers.nf.test.snap
index fba0c40b8..81545d788 100644
--- a/tests/pipeline/doubleprimers.nf.test.snap
+++ b/tests/pipeline/doubleprimers.nf.test.snap
@@ -13,7 +13,7 @@
},
"software_versions": {
"content": [
- "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, FILTER_STATS={pandas=1.1.5, python=3.9.1}, KRAKEN2_KRAKEN2={kraken2=2.1.2, pigz=2.6}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, FILTER_STATS={pandas=1.1.5, python=3.9.1}, KRAKEN2_KRAKEN2={kraken2=2.1.2, pigz=2.6}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-07-27T13:49:03+0000"
},
diff --git a/tests/pipeline/fasta.nf.test.snap b/tests/pipeline/fasta.nf.test.snap
index 70bcf0421..00351b323 100644
--- a/tests/pipeline/fasta.nf.test.snap
+++ b/tests/pipeline/fasta.nf.test.snap
@@ -7,7 +7,7 @@
},
"software_versions": {
"content": [
- "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FILTER_CODONS={pandas=1.1.5, python=3.9.1}, FILTER_LEN_ASV={Biostrings=2.58.0, R=4.0.3}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FILTER_CODONS={pandas=1.1.5, python=3.9.1}, FILTER_LEN_ASV={Biostrings=2.58.0, R=4.0.3}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-05-28T21:06:17+0000"
},
diff --git a/tests/pipeline/iontorrent.nf.test.snap b/tests/pipeline/iontorrent.nf.test.snap
index 203423476..27f6d930a 100644
--- a/tests/pipeline/iontorrent.nf.test.snap
+++ b/tests/pipeline/iontorrent.nf.test.snap
@@ -13,7 +13,7 @@
},
"software_versions": {
"content": [
- "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-06-20T01:42:35+0000"
},
diff --git a/tests/pipeline/multi.nf.test.snap b/tests/pipeline/multi.nf.test.snap
index c5492d964..4a8d2afc0 100644
--- a/tests/pipeline/multi.nf.test.snap
+++ b/tests/pipeline/multi.nf.test.snap
@@ -14,7 +14,7 @@
},
"software_versions": {
"content": [
- "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, FILTER_STATS={pandas=1.1.5, python=3.9.1}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, FILTER_STATS={pandas=1.1.5, python=3.9.1}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-05-28T21:15:03+0000"
},
diff --git a/tests/pipeline/novaseq.nf.test.snap b/tests/pipeline/novaseq.nf.test.snap
index 0591a632e..7a294cec5 100644
--- a/tests/pipeline/novaseq.nf.test.snap
+++ b/tests/pipeline/novaseq.nf.test.snap
@@ -7,7 +7,7 @@
},
"software_versions": {
"content": [
- "{CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, FILTER_CODONS={pandas=1.1.5, python=3.9.1}, RENAME_RAW_DATA_FILES={sed=4.7}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, FILTER_CODONS={pandas=1.1.5, python=3.9.1}, RENAME_RAW_DATA_FILES={sed=4.7}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-06-20T00:10:02+0000"
},
diff --git a/tests/pipeline/pacbio_its.nf.test.snap b/tests/pipeline/pacbio_its.nf.test.snap
index 43c873cf1..74f31f81d 100644
--- a/tests/pipeline/pacbio_its.nf.test.snap
+++ b/tests/pipeline/pacbio_its.nf.test.snap
@@ -35,7 +35,7 @@
},
"software_versions": {
"content": [
- "{ASSIGNSH={pandas=1.1.5, python=3.9.1}, BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, FORMAT_TAXRESULTS_STD={pandas=1.1.5, python=3.9.1}, ITSX_CUTASV={ITSx=1.1.3}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, SBDIEXPORT={R=3.6.3}, VSEARCH_USEARCHGLOBAL={vsearch=2.21.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{ASSIGNSH={pandas=1.1.5, python=3.9.1}, BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, FORMAT_TAXRESULTS_STD={pandas=1.1.5, python=3.9.1}, ITSX_CUTASV={ITSx=1.1.3}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, SBDIEXPORT={R=3.6.3}, VSEARCH_USEARCHGLOBAL={vsearch=2.21.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-06-20T02:07:02+0000"
},
diff --git a/tests/pipeline/pplace.nf.test.snap b/tests/pipeline/pplace.nf.test.snap
index 6e7e9ca63..35a33476e 100644
--- a/tests/pipeline/pplace.nf.test.snap
+++ b/tests/pipeline/pplace.nf.test.snap
@@ -8,7 +8,7 @@
},
"software_versions": {
"content": [
- "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, EPANG_PLACE={epang=0.3.8}, FILTER_STATS={pandas=1.1.5, python=3.9.1}, GAPPA_ASSIGN={gappa=0.8.0}, GAPPA_GRAFT={gappa=0.8.0}, GAPPA_HEATTREE={gappa=0.8.0}, HMMER_AFAFORMATQUERY={hmmer/easel=0.48}, HMMER_AFAFORMATREF={hmmer/easel=0.48}, HMMER_HMMALIGNQUERY={hmmer=3.3.2}, HMMER_HMMALIGNREF={hmmer=3.3.2}, HMMER_HMMBUILD={hmmer=3.3.2}, HMMER_MASKQUERY={hmmer/easel=0.48}, HMMER_MASKREF={hmmer/easel=0.48}, HMMER_UNALIGNREF={hmmer/easel=0.48}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, QIIME2_INSEQ={qiime2=2023.7.0}, RENAME_RAW_DATA_FILES={sed=4.7}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, EPANG_PLACE={epang=0.3.8}, FILTER_STATS={pandas=1.1.5, python=3.9.1}, GAPPA_ASSIGN={gappa=0.8.0}, GAPPA_GRAFT={gappa=0.8.0}, GAPPA_HEATTREE={gappa=0.8.0}, HMMER_AFAFORMATQUERY={hmmer/easel=0.48}, HMMER_AFAFORMATREF={hmmer/easel=0.48}, HMMER_HMMALIGNQUERY={hmmer=3.3.2}, HMMER_HMMALIGNREF={hmmer=3.3.2}, HMMER_HMMBUILD={hmmer=3.3.2}, HMMER_MASKQUERY={hmmer/easel=0.48}, HMMER_MASKREF={hmmer/easel=0.48}, HMMER_UNALIGNREF={hmmer/easel=0.48}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, QIIME2_INSEQ={qiime2=2023.7.0}, RENAME_RAW_DATA_FILES={sed=4.7}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-06-20T17:24:03+0000"
},
diff --git a/tests/pipeline/qiimecustom.nf.test.snap b/tests/pipeline/qiimecustom.nf.test.snap
index 5a9d31263..9bbb27a3a 100644
--- a/tests/pipeline/qiimecustom.nf.test.snap
+++ b/tests/pipeline/qiimecustom.nf.test.snap
@@ -13,7 +13,7 @@
},
"software_versions": {
"content": [
- "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, QIIME2_INSEQ={qiime2=2023.7.0}, RENAME_RAW_DATA_FILES={sed=4.7}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, QIIME2_INSEQ={qiime2=2023.7.0}, RENAME_RAW_DATA_FILES={sed=4.7}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-05-28T21:18:54+0000"
},
diff --git a/tests/pipeline/reftaxcustom.nf.test.snap b/tests/pipeline/reftaxcustom.nf.test.snap
index 22db22520..077b9391f 100644
--- a/tests/pipeline/reftaxcustom.nf.test.snap
+++ b/tests/pipeline/reftaxcustom.nf.test.snap
@@ -13,7 +13,7 @@
},
"software_versions": {
"content": [
- "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, KRAKEN2_KRAKEN2={kraken2=2.1.2, pigz=2.6}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, QIIME2_INSEQ={qiime2=2023.7.0}, RENAME_RAW_DATA_FILES={sed=4.7}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, KRAKEN2_KRAKEN2={kraken2=2.1.2, pigz=2.6}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, QIIME2_INSEQ={qiime2=2023.7.0}, RENAME_RAW_DATA_FILES={sed=4.7}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-05-28T21:18:54+0000"
},
diff --git a/tests/pipeline/single.nf.test.snap b/tests/pipeline/single.nf.test.snap
index 44977b82f..b73d8e39e 100644
--- a/tests/pipeline/single.nf.test.snap
+++ b/tests/pipeline/single.nf.test.snap
@@ -13,7 +13,7 @@
},
"software_versions": {
"content": [
- "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-05-28T20:35:33+0000"
},
diff --git a/tests/pipeline/sintax.nf.test.snap b/tests/pipeline/sintax.nf.test.snap
index 03e44107e..749ba2175 100644
--- a/tests/pipeline/sintax.nf.test.snap
+++ b/tests/pipeline/sintax.nf.test.snap
@@ -16,7 +16,7 @@
},
"software_versions": {
"content": [
- "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, FILTER_STATS={pandas=1.1.5, python=3.9.1}, ITSX_CUTASV={ITSx=1.1.3}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, SBDIEXPORT={R=3.6.3}, VSEARCH_SINTAX={vsearch=2.21.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, FILTER_STATS={pandas=1.1.5, python=3.9.1}, ITSX_CUTASV={ITSx=1.1.3}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, RENAME_RAW_DATA_FILES={sed=4.7}, SBDIEXPORT={R=3.6.3}, VSEARCH_SINTAX={vsearch=2.21.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-06-20T16:40:18+0000"
},
diff --git a/tests/pipeline/test.nf.test.snap b/tests/pipeline/test.nf.test.snap
index 89702ac1c..dd523b980 100644
--- a/tests/pipeline/test.nf.test.snap
+++ b/tests/pipeline/test.nf.test.snap
@@ -22,7 +22,7 @@
},
"software_versions": {
"content": [
- "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.12.0, yaml=6.0.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, FILTER_CLUSTERS={pandas=1.1.5, python=3.9.1}, FILTER_LEN_ASV={Biostrings=2.58.0, R=4.0.3}, FILTER_STATS={pandas=1.1.5, python=3.9.1}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, QIIME2_INSEQ={qiime2=2023.7.0}, RENAME_RAW_DATA_FILES={sed=4.7}, SBDIEXPORT={R=3.6.3}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, VSEARCH_CLUSTER={vsearch=2.21.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
+ "{BARRNAP={barrnap=0.9}, CUSTOM_DUMPSOFTWAREVERSIONS={python=3.11.7, yaml=5.4.1}, CUTADAPT_BASIC={cutadapt=3.4}, DADA2_DENOISING={R=4.3.1, dada2=1.28.0}, DADA2_FILTNTRIM={R=4.3.1, dada2=1.28.0}, DADA2_QUALITY1={R=4.3.1, ShortRead=1.58.0, dada2=1.28.0}, DADA2_TAXONOMY={R=4.3.1, dada2=1.28.0}, FASTQC={fastqc=0.12.1}, FILTER_CLUSTERS={pandas=1.1.5, python=3.9.1}, FILTER_LEN_ASV={Biostrings=2.58.0, R=4.0.3}, FILTER_STATS={pandas=1.1.5, python=3.9.1}, PHYLOSEQ={R=4.3.0, phyloseq=1.44.0}, QIIME2_INSEQ={qiime2=2023.7.0}, RENAME_RAW_DATA_FILES={sed=4.7}, SBDIEXPORT={R=3.6.3}, TRUNCLEN={pandas=1.1.5, python=3.9.1}, VSEARCH_CLUSTER={vsearch=2.21.1}, Workflow={nf-core/ampliseq=2.9.0dev}}"
],
"timestamp": "2023-05-28T20:55:32+0000"
},
diff --git a/workflows/ampliseq.nf b/workflows/ampliseq.nf
index 3f158200d..10039eda3 100644
--- a/workflows/ampliseq.nf
+++ b/workflows/ampliseq.nf
@@ -916,6 +916,13 @@ workflow.onComplete {
}
}
+workflow.onError {
+ if (workflow.errorReport.contains("Process requirement exceeds available memory")) {
+ println("🛑 Default resources exceed availability 🛑 ")
+ println("💡 See here on how to configure pipeline: https://nf-co.re/docs/usage/configuration#tuning-workflow-resources 💡")
+ }
+}
+
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
THE END