diff --git a/.editorconfig b/.editorconfig index dcb6e046..d3f7b6fb 100644 --- a/.editorconfig +++ b/.editorconfig @@ -32,3 +32,11 @@ indent_size = unset [/assets/*.Rmd] indent_size = unset + +# ignore Readme +[README.md] +indent_style = unset + +# ignore python +[*.{py}] +indent_style = unset diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 902a3782..8d048e4e 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -27,6 +27,9 @@ If you're not used to this workflow with git, you can start with some [docs from ## Tests +You can optionally test your changes by running the pipeline locally. When doing so, it is recommended to use the `debug` profile to +receive warnings about process selectors and other debug info. Example: `nextflow run . -profile debug,test,docker --outdir <OUTDIR>`. + When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests. Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 2318ed30..1c3a0658 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -19,6 +19,7 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/nf-core/airr - [ ] If necessary, also make a PR on the nf-core/airrflow _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. - [ ] Make sure your code lints (`nf-core lint`). - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir <OUTDIR>`). +- [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir <OUTDIR>`). - [ ] Usage Documentation in `docs/usage.md` is updated. - [ ] Output Documentation in `docs/output.md` is updated. - [ ] `CHANGELOG.md` is updated.
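For reference, the `debug` profile used in the commands above comes from the pipeline's `nextflow.config`. A minimal sketch of what it typically contains in the nf-core template (an assumption — the profile definition itself is not part of this diff):

```groovy
// Sketch of the nf-core template's debug profile (assumed, not shown in this diff).
// It dumps task hashes, echoes the executing host before each process, and keeps
// work directories around so unexpected behaviour can be inspected after the run.
profiles {
    debug {
        dumpHashes           = true
        process.beforeScript = 'echo $HOSTNAME'
        cleanup              = false
    }
}
```

Combining it as `-profile debug,test,docker` runs the bundled test data with these settings, which is what surfaces the process-selector warnings the checklist item asks about.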
diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml index 6b25b2c6..62ab5695 100644 --- a/.github/workflows/awsfulltest.yml +++ b/.github/workflows/awsfulltest.yml @@ -28,7 +28,7 @@ jobs: } profiles: test_full - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: Tower debug log file path: | diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml index 269c6875..869b2ab2 100644 --- a/.github/workflows/awstest.yml +++ b/.github/workflows/awstest.yml @@ -25,7 +25,7 @@ jobs: } profiles: test - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: Tower debug log file path: | diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml index 5090169a..8849d45e 100644 --- a/.github/workflows/branch.yml +++ b/.github/workflows/branch.yml @@ -19,7 +19,7 @@ jobs: # NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets - name: Post PR comment if: failure() - uses: mshick/add-pr-comment@v1 + uses: mshick/add-pr-comment@v2 with: message: | ## This PR is against the `master` branch :x: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5cd19a48..cac7308e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: - "latest-everything" steps: - name: Check out pipeline code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 diff --git a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml index 694e90ec..e37cfda5 100644 --- a/.github/workflows/clean-up.yml +++ b/.github/workflows/clean-up.yml @@ -10,7 +10,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/stale@v7 + - uses: actions/stale@v9 with: stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." diff --git a/.github/workflows/download_pipeline.yml b/.github/workflows/download_pipeline.yml new file mode 100644 index 00000000..8611458a --- /dev/null +++ b/.github/workflows/download_pipeline.yml @@ -0,0 +1,67 @@ +name: Test successful pipeline download with 'nf-core download' + +# Run the workflow when: +# - dispatched manually +# - when a PR is opened or reopened to master branch +# - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev. 
+on: + workflow_dispatch: + pull_request: + types: + - opened + branches: + - master + pull_request_target: + branches: + - master + +env: + NXF_ANSI_LOG: false + +jobs: + download: + runs-on: ubuntu-latest + steps: + - name: Install Nextflow + uses: nf-core/setup-nextflow@v1 + + - uses: actions/setup-python@v5 + with: + python-version: "3.11" + architecture: "x64" + - uses: eWaterCycle/setup-singularity@v7 + with: + singularity-version: 3.8.3 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install git+https://github.com/nf-core/tools.git@dev + + - name: Get the repository name and current branch set as environment variable + run: | + echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} + echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV} + echo "REPO_BRANCH=${GITHUB_REF#refs/heads/}" >> ${GITHUB_ENV} + + - name: Download the pipeline + env: + NXF_SINGULARITY_CACHEDIR: ./ + run: | + nf-core download ${{ env.REPO_LOWERCASE }} \ + --revision ${{ env.REPO_BRANCH }} \ + --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ + --compress "none" \ + --container-system 'singularity' \ + --container-library "quay.io" -l "docker.io" -l "ghcr.io" \ + --container-cache-utilisation 'amend' \ + --download-configuration + + - name: Inspect download + run: tree ./${{ env.REPOTITLE_LOWERCASE }} + + - name: Run the downloaded pipeline + env: + NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_HOME_MOUNT: true + run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 515f7455..8dda78ab 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -4,7 +4,7 @@ on: types: [created] jobs: - deploy: + fix-linting: # Only run if comment is on a PR with the main repo, and if it contains the magic keywords if: > contains(github.event.comment.html_url, '/pull/') && @@ -13,10 +13,17 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@v3 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 with: token: ${{ secrets.nf_core_bot_auth_token }} + # indication that the linting is being fixed + - name: React on comment + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: eyes + # Action runs on the issue comment, so we don't get the PR by default # Use the gh cli to check out the PR - name: Checkout Pull Request @@ -24,32 +31,59 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v3 + # Install and run pre-commit + - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 + with: + python-version: 3.11 - - name: Install Prettier - run: npm install -g prettier @prettier/plugin-php + - name: Install pre-commit + run: pip install pre-commit - # Check that we actually need to fix something - - name: Run 'prettier --check' - id: prettier_status - run: | - if prettier --check ${GITHUB_WORKSPACE}; then - echo "result=pass" >> $GITHUB_OUTPUT - else - echo "result=fail" >> $GITHUB_OUTPUT - fi + - name: Run pre-commit + id: pre-commit + run: pre-commit run --all-files + continue-on-error: true - - name: Run 'prettier --write' - if: steps.prettier_status.outputs.result == 'fail' - run: prettier --write 
${GITHUB_WORKSPACE} + # indication that the linting has finished + - name: react if linting finished successfully + if: steps.pre-commit.outcome == 'success' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: "+1" - name: Commit & push changes - if: steps.prettier_status.outputs.result == 'fail' + id: commit-and-push + if: steps.pre-commit.outcome == 'failure' run: | git config user.email "core@nf-co.re" git config user.name "nf-core-bot" git config push.default upstream git add . git status - git commit -m "[automated] Fix linting with Prettier" + git commit -m "[automated] Fix code linting" git push + + - name: react if linting errors were fixed + id: react-if-fixed + if: steps.commit-and-push.outcome == 'success' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: hooray + + - name: react if linting errors were not fixed + if: steps.commit-and-push.outcome == 'failure' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + comment-id: ${{ github.event.comment.id }} + reactions: confused + + - name: react if linting errors were not fixed + if: steps.commit-and-push.outcome == 'failure' + uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 + with: + issue-number: ${{ github.event.issue.number }} + body: | + @${{ github.actor }} I tried to fix the linting errors, but it didn't work. Please fix them manually. + See [CI log](https://github.com/nf-core/airrflow/actions/runs/${{ github.run_id }}) for more details. diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index b8bdd214..81cd098e 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -11,72 +11,33 @@ on: types: [published] jobs: - EditorConfig: + pre-commit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-node@v3 - - - name: Install editorconfig-checker - run: npm install -g editorconfig-checker - - - name: Run ECLint check - run: editorconfig-checker -exclude README.md $(find .* -type f | grep -v '.git\|.py\|.md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile') - - Prettier: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - uses: actions/setup-node@v3 - - - name: Install Prettier - run: npm install -g prettier - - - name: Run Prettier --check - run: prettier --check ${GITHUB_WORKSPACE} - - PythonBlack: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Check code lints with Black - uses: psf/black@stable - - # If the above check failed, post a comment on the PR explaining the failure - - name: Post PR comment - if: failure() - uses: mshick/add-pr-comment@v1 + - name: Set up Python 3.11 + uses: actions/setup-python@v5 with: - message: | - ## Python linting (`black`) is failing - - To keep the code consistent with lots of contributors, we run automated code consistency checks.
- To fix this CI test, please run: - - * Install [`black`](https://black.readthedocs.io/en/stable/): `pip install black` - * Fix formatting errors in your pipeline: `black .` - - Once you push these changes the test should pass, and you can hide this comment :+1: + python-version: 3.11 + cache: "pip" - We highly recommend setting up Black in your code editor so that this formatting is done automatically on save. Ask about it on Slack for help! + - name: Install pre-commit + run: pip install pre-commit - Thanks again for your contribution! - repo-token: ${{ secrets.GITHUB_TOKEN }} - allow-repeats: false + - name: Run pre-commit + run: pre-commit run --all-files nf-core: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.11" architecture: "x64" @@ -99,7 +60,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: linting-logs path: | diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml index 0bbcd30f..147bcd10 100644 --- a/.github/workflows/linting_comment.yml +++ b/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@v2 + uses: dawidd6/action-download-artifact@v3 with: workflow: linting.yml workflow_conclusion: completed diff --git a/.github/workflows/release-announcments.yml b/.github/workflows/release-announcements.yml similarity index 96% rename from .github/workflows/release-announcments.yml rename to .github/workflows/release-announcements.yml index 6ad33927..21ac3f06 100644 --- a/.github/workflows/release-announcments.yml +++ b/.github/workflows/release-announcements.yml @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install dependencies @@ -56,7 +56,7 @@ jobs: bsky-post: runs-on: ubuntu-latest steps: - - uses: zentered/bluesky-post-action@v0.0.2 + - uses: zentered/bluesky-post-action@v0.1.0 with: post: | Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}! 
diff --git a/.gitpod.yml b/.gitpod.yml index 25488dcc..363d5b1d 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -4,6 +4,9 @@ tasks: command: | pre-commit install --install-hooks nextflow self-update + - name: unset JAVA_TOOL_OPTIONS + command: | + unset JAVA_TOOL_OPTIONS vscode: extensions: # based on nf-core.nf-core-extensionpack diff --git a/.nf-core.yml b/.nf-core.yml index 4ae6f7f0..6f253076 100644 --- a/.nf-core.yml +++ b/.nf-core.yml @@ -3,4 +3,12 @@ lint: - conf/igenomes.config multiqc_config: - report_comment + nextflow_config: + - config_defaults: + - params.miairr + - params.report_rmd + - params.report_css + - params.report_logo + - params.report_logo_img + - params.config_profile_url repository_type: pipeline diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0c31cdb9..af57081f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,10 @@ repos: - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v2.7.1" + rev: "v3.1.0" hooks: - id: prettier + - repo: https://github.com/editorconfig-checker/editorconfig-checker.python + rev: "2.7.3" + hooks: + - id: editorconfig-checker + alias: ec diff --git a/CHANGELOG.md b/CHANGELOG.md index 755f0d29..a2f9f389 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,16 +3,25 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). -## [3.2.1dev] - +## [3.3.0dev] - ### `Added` +- [#294](https://github.com/nf-core/airrflow/pull/294) Merge template updates nf-core/tools v2.11.1 + ### `Fixed` -- Removed optional output from FilterQuality to not fail silently +- [#294](https://github.com/nf-core/airrflow/pull/294) Removed optional output from FilterQuality to not fail silently +- [#293](https://github.com/nf-core/airrflow/pull/293) `clonal_threshold` is validated to be 'auto' or a number greater than zero +- [#295](https://github.com/nf-core/airrflow/pull/295) Fixed airrflow report sequence plot and added paths to the clonal analysis reports. ### `Dependencies` +| Dependency | Old version | New version | +| ---------- | ----------- | ----------- | +| multiqc | 1.14 | 1.18 | +| enchantr | 0.9.0 | 0.10.0 | + ## [3.2.0] - 2023-10-27 Expecto patronum ### `Added` diff --git a/README.md b/README.md index 7b11703f..e87b9a4e 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,9 @@ -# ![nf-core/airrflow](docs/images/nf-core-airrflow_logo_light.png#gh-light-mode-only) ![nf-core/airrflow](docs/images/nf-core-airrflow_logo_dark.png#gh-dark-mode-only) -
+<h1>
+  <picture>
+    <source media="(prefers-color-scheme: dark)" srcset="docs/images/nf-core-airrflow_logo_dark.png">
+    <img alt="nf-core/airrflow" src="docs/images/nf-core-airrflow_logo_light.png">
+  </picture>
+</h1>
[![GitHub Actions CI Status](https://github.com/nf-core/airrflow/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/airrflow/actions?query=workflow%3A%22nf-core+CI%22) [![GitHub Actions Linting Status](https://github.com/nf-core/airrflow/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/airrflow/actions?query=workflow%3A%22nf-core+linting%22) [![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/airrflow/results) @@ -77,11 +81,8 @@ nf-core/airrflow allows the end-to-end processing of BCR and TCR bulk and single ## Usage -:::note -If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how -to set-up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) -with `-profile test` before running the workflow on actual data. -::: +> [!NOTE] +> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data. First, ensure that the pipeline tests run on your infrastructure: @@ -143,11 +144,17 @@ For more details about the output files and reports, please refer to the ## Credits -nf-core/airrflow was written by [Gisela Gabernet](https://github.com/ggabernet), [Susanna Marquez](https://github.com/ssnn-airr), [Alexander Peltzer](@apeltzer) and [Simon Heumos](@subwaystation). +nf-core/airrflow was originally written by: -Further contributors to the pipeline are: +- [Gisela Gabernet](https://github.com/ggabernet) +- [Susanna Marquez](https://github.com/ssnn-airr) +- [Alexander Peltzer](https://github.com/apeltzer) +- [Simon Heumos](https://github.com/subwaystation) -- [@dladd](https://github.com/dladd) +We thank the following people for their extensive assistance in the development of the pipeline: + +- [David Ladd](https://github.com/dladd) +- [Friederike Hanssen](https://github.com/FriederikeHanssen) ## Contributions and Support @@ -157,9 +164,17 @@ For further information or help, don't hesitate to get in touch on the [Slack `# ## Citations -If you use nf-core/airrflow for your analysis, please cite it using the following DOI: [10.5281/zenodo.2642009](https://doi.org/10.5281/zenodo.2642009) +If you use nf-core/airrflow for your analysis, please cite the preprint as follows: + +> **nf-core/airrflow: an adaptive immune receptor repertoire analysis workflow employing the Immcantation framework** +> +> Gisela Gabernet, Susanna Marquez, Robert Bjornson, Alexander Peltzer, Hailong Meng, Edel Aron, Noah Y. Lee, Cole Jensen, David Ladd, Friederike Hanssen, Simon Heumos, nf-core community, Gur Yaari, Markus C. Kowarik, Sven Nahnsen, Steven H. Kleinstein. +> +> bioRxiv. 2024. doi: [10.1101/2024.01.18.576147](https://doi.org/10.1101/2024.01.18.576147). + +You can cite the specific pipeline version using the following DOI: [10.5281/zenodo.2642009](https://doi.org/10.5281/zenodo.2642009) -An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file. +Please also cite the tools used by the pipeline. An extensive list of references can be found in the [`CITATIONS.md`](CITATIONS.md) file.
You can cite the `nf-core` publication as follows:
diff --git a/assets/email_template.html b/assets/email_template.html
index 2603a841..43c3875d 100644
--- a/assets/email_template.html
+++ b/assets/email_template.html
@@ -12,7 +12,7 @@

 <img src="cid:nfcorepipelinelogo">

-<h1>nf-core/airrflow v${version}</h1>
+<h1>nf-core/airrflow ${version}</h1>
 <h2>Run Name: $runName</h2>

 <% if (!success){
diff --git a/assets/email_template.txt b/assets/email_template.txt
index 4c539b92..3629a189 100644
--- a/assets/email_template.txt
+++ b/assets/email_template.txt
@@ -4,7 +4,7 @@
   |\ | |__  __ /  ` /  \ |__) |__         }  {
   | \| |       \__, \__/ |  \ |___     \`-._,-`-,
                                         `._,._,'
-  nf-core/airrflow v${version}
+  nf-core/airrflow ${version}
 ----------------------------------------------------
 Run Name: $runName

diff --git a/assets/methods_description_template.yml b/assets/methods_description_template.yml
index e4acf864..6a94b1e2 100644
--- a/assets/methods_description_template.yml
+++ b/assets/methods_description_template.yml
@@ -3,11 +3,9 @@ description: "Suggested text and references to use when describing pipeline usag
 section_name: "nf-core/airrflow Methods Description"
 section_href: "https://github.com/nf-core/airrflow"
 plot_type: "html"
-## TODO nf-core: Update the HTML below to your preferred methods description, e.g. add publication citation for this pipeline
-## You inject any metadata in the Nextflow '${workflow}' object
 data: |
   <h4>Methods</h4>
-  <p>Data was processed using nf-core/airrflow v${workflow.manifest.version} ${doi_text} of the nf-core collection of workflows (Ewels et al., 2020), utilising reproducible software environments from the Bioconda (Grüning et al., 2018) and Biocontainers (da Veiga Leprevost et al., 2017) projects.</p>
+  <p>Data was processed using nf-core/airrflow v${workflow.manifest.version} (${doi_text}; Gabernet et al., 2024) of the nf-core collection of workflows (Ewels et al., 2020), utilising reproducible software environments from the Bioconda (Grüning et al., 2018) and Biocontainers (da Veiga Leprevost et al., 2017) projects.</p>
   <p>The pipeline was executed with Nextflow v${workflow.nextflow.version} (Di Tommaso et al., 2017) with the following command:</p>
   <pre><code>${workflow.commandLine}</code></pre>
   <p>${tool_citations}</p>
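The `${...}` placeholders in the `data` block above are not interpreted by MultiQC; the pipeline substitutes them before the report is built. A minimal sketch of that substitution using Groovy's `SimpleTemplateEngine`, with assumed example values (the real bindings come from the Nextflow `workflow` object in the pipeline's library code):

```groovy
import groovy.text.SimpleTemplateEngine

// Assumed example bindings; field names mirror the template's placeholders.
def fields = [
    workflow      : [manifest: [version: '3.3.0dev']],
    doi_text      : 'doi: 10.5281/zenodo.2642009',
    tool_citations: 'MultiQC (Ewels et al. 2016).',
]
def tmpl = 'Data was processed using nf-core/airrflow v${workflow.manifest.version} (${doi_text}; Gabernet et al., 2024). ${tool_citations}'
// Renders: "Data was processed using nf-core/airrflow v3.3.0dev (doi: 10.5281/zenodo.2642009; Gabernet et al., 2024). MultiQC (Ewels et al. 2016)."
println new SimpleTemplateEngine().createTemplate(tmpl).make(fields).toString()
```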
diff --git a/assets/multiqc_config.yml b/assets/multiqc_config.yml index 52ed38e2..d99b3f03 100644 --- a/assets/multiqc_config.yml +++ b/assets/multiqc_config.yml @@ -1,5 +1,5 @@ -report_comment: - This report has been generated by the nf-core/airrflow +report_comment: > + This report has been generated by the nf-core/airrflow analysis pipeline. For information about how to interpret these results, please see the documentation. diff --git a/assets/nf-core-airrflow_logo_light.png b/assets/nf-core-airrflow_logo_light.png index 2c7265f7..fb7707da 100644 Binary files a/assets/nf-core-airrflow_logo_light.png and b/assets/nf-core-airrflow_logo_light.png differ diff --git a/assets/repertoire_comparison.Rmd b/assets/repertoire_comparison.Rmd index 64d51a89..29fa3b95 100644 --- a/assets/repertoire_comparison.Rmd +++ b/assets/repertoire_comparison.Rmd @@ -48,6 +48,19 @@ datadir <- "." ``` +# Airrflow results + +The repertoires per subject after clonal analysis can be found in the +subdirectory [clonal_analysis/define_clones/all_reps_clone_report/repertoires](clonal_analysis/define_clones/all_reps_clone_report/repertoires). + +Additionally, HTML reports summarizing the results are provided: + +- Report summarizing the clonal threshold found for each specified cloning group [clonal_analysis/find_threshold/all_reps_dist_report/index.html](clonal_analysis/find_threshold/all_reps_dist_report/index.html). +- Report summarizing the repertoire properties for all the samples [clonal_analysis/define_clones/all_reps_clone_report/index.html](clonal_analysis/define_clones/all_reps_clone_report/index.html). +- Report summarizing the lineage trees for each specified cloning group [clonal_analysis/dowser_lineages/](clonal_analysis/dowser_lineages/). + +A full description of the pipeline results can be found in the Output section of the [nf-core/airrflow website](https://nf-co.re/airrflow).
+ # Number of sequences ## Sequence assembly steps @@ -97,11 +110,11 @@ if (any(is.na(tab_seqs_assembled$sample_id))) { tab_seqs_assembled$sample_id <- sapply(tab_seqs_assembled$file_0, function(x) unlist(strsplit(as.character(x), "_"))[1]) } -dat <- tidyr::pivot_wider(tab_seqs_assembled, - id_cols=sample_id, - names_from=task, - values_from=to_num_seqs) -dat <- dat %>% dplyr::relocate(any_of(c("sample_id","ConvertDb-fasta", "AssignGenes-igblast", "MakeDB-igblast", "FilterQuality", +dat <- tab_seqs_assembled %>% + tidyr::pivot_wider(id_cols=sample_id, + names_from=task, + values_from=to_num_seqs) +dat <- dat %>% dplyr::select(any_of(c("sample_id","ConvertDb-fasta", "AssignGenes-igblast", "MakeDB-igblast", "FilterQuality", "ParseDb-split", "FilterJunctionMod3","AddMetadata","SingleCellQC","CreateGermlines", "RemoveChimeric","CollapseDuplicates","ClonePass"))) %>% dplyr::arrange(sample_id) @@ -112,12 +125,17 @@ write.table(dat, file=paste0(seq_dir,"/Table_sequences_assembled.tsv"), sep="\t" ```{r assembled_seq_numbers_plot, echo=FALSE, warning=FALSE, results='asis'} -tab_seqs_assembled$task <- factor(tab_seqs_assembled$task, levels=c("AssignGenes-igblast", "MakeDB-igblast", - "FilterQuality", - "ParseDb-split", "FilterJunctionMod3", "AddMetadata", - "CreateGermlines", "RemoveChimeric", "CollapseDuplicates", - "ClonePass")) -tab_seqs_assembled <- tab_seqs_assembled[!grepl("productive-F",tab_seqs_assembled$to_name),] +tab_seqs_assembled <- tab_seqs_assembled %>% + filter( !grepl("-fail.tsv", to_name) ) %>% + filter( !grepl("productive-F.tsv", to_name) ) %>% + dplyr::filter( task %in% c("sample_id","AssignGenes-igblast", "MakeDB-igblast", "FilterQuality", + "ParseDb-split", "FilterJunctionMod3","AddMetadata","SingleCellQC","CreateGermlines", + "RemoveChimeric","CollapseDuplicates","ClonePass")) + +tab_seqs_assembled$task <- factor(tab_seqs_assembled$task, levels=c("AssignGenes-igblast", "MakeDB-igblast", "FilterQuality", + "ParseDb-split", "FilterJunctionMod3", "AddMetadata", "SingleCellQC", + "CreateGermlines", "RemoveChimeric", "CollapseDuplicates", + "ClonePass")) seqs_plot_assembled <- ggplot(data=tab_seqs_assembled, aes(x=task, y=to_num_seqs, group=sample_id)) + diff --git a/assets/slackreport.json b/assets/slackreport.json index bd9523d9..14549b87 100644 --- a/assets/slackreport.json +++ b/assets/slackreport.json @@ -3,7 +3,7 @@ { "fallback": "Plain-text summary of the attachment.", "color": "<% if (success) { %>good<% } else { %>danger<%} %>", - "author_name": "nf-core/airrflow v${version} - ${runName}", + "author_name": "nf-core/airrflow ${version} - ${runName}", "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", "fields": [ diff --git a/conf/modules.config b/conf/modules.config index df8fad6f..3d69f9b4 100644 --- a/conf/modules.config +++ b/conf/modules.config @@ -138,7 +138,7 @@ process { ] } - withName: PRESTO_MASKPRIMERS { + withName: PRESTO_MASKPRIMERS_UMI { publishDir = [ path: { "${params.outdir}/presto/02-maskprimers/${meta.id}" }, mode: params.publish_dir_mode, @@ -487,7 +487,7 @@ process { withName: AIRRFLOW_REPORT { publishDir = [ - path: { "${params.outdir}/repertoire_analysis" }, + path: { "${params.outdir}" }, mode: params.publish_dir_mode, saveAs: { filename -> filename.equals('versions.yml') ? null : filename } ] @@ -511,7 +511,7 @@ process { } withName: 'MULTIQC' { - ext.args = params.multiqc_title ? 
"--title \"$params.multiqc_title\"" : '' + ext.args = { params.multiqc_title ? "--title \"$params.multiqc_title\"" : '' } publishDir = [ path: { "${params.outdir}/multiqc" }, mode: params.publish_dir_mode, diff --git a/docs/images/nf-core-airrflow_logo_dark.png b/docs/images/nf-core-airrflow_logo_dark.png index 2ced6a8f..b1c9e6df 100644 Binary files a/docs/images/nf-core-airrflow_logo_dark.png and b/docs/images/nf-core-airrflow_logo_dark.png differ diff --git a/docs/images/nf-core-airrflow_logo_light.png b/docs/images/nf-core-airrflow_logo_light.png index 2c7265f7..f346829a 100644 Binary files a/docs/images/nf-core-airrflow_logo_light.png and b/docs/images/nf-core-airrflow_logo_light.png differ diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy index 01b8653d..e248e4c3 100755 --- a/lib/NfcoreTemplate.groovy +++ b/lib/NfcoreTemplate.groovy @@ -4,6 +4,7 @@ import org.yaml.snakeyaml.Yaml import groovy.json.JsonOutput +import nextflow.extension.FilesEx class NfcoreTemplate { @@ -141,12 +142,14 @@ class NfcoreTemplate { try { if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } // Try to send HTML e-mail using sendmail + def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") + sendmail_tf.withWriter { w -> w << sendmail_html } [ 'sendmail', '-t' ].execute() << sendmail_html log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" } catch (all) { // Catch failures and try with plaintext def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] - if ( mqc_report.size() <= max_multiqc_email_size.toBytes() ) { + if ( mqc_report != null && mqc_report.size() <= max_multiqc_email_size.toBytes() ) { mail_cmd += [ '-A', mqc_report ] } mail_cmd.execute() << email_html @@ -155,14 +158,16 @@ class NfcoreTemplate { } // Write summary e-mail HTML to a file - def output_d = new File("${params.outdir}/pipeline_info/") - if (!output_d.exists()) { - output_d.mkdirs() - } - def output_hf = new File(output_d, "pipeline_report.html") + def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") output_hf.withWriter { w -> w << email_html } - def output_tf = new File(output_d, "pipeline_report.txt") + FilesEx.copyTo(output_hf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.html"); + output_hf.delete() + + // Write summary e-mail TXT to a file + def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") output_tf.withWriter { w -> w << email_txt } + FilesEx.copyTo(output_tf.toPath(), "${params.outdir}/pipeline_info/pipeline_report.txt"); + output_tf.delete() } // @@ -227,15 +232,14 @@ class NfcoreTemplate { // Dump pipeline parameters in a json file // public static void dump_parameters(workflow, params) { - def output_d = new File("${params.outdir}/pipeline_info/") - if (!output_d.exists()) { - output_d.mkdirs() - } - def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') - def output_pf = new File(output_d, "params_${timestamp}.json") + def filename = "params_${timestamp}.json" + def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") def jsonStr = JsonOutput.toJson(params) - output_pf.text = JsonOutput.prettyPrint(jsonStr) + temp_pf.text = JsonOutput.prettyPrint(jsonStr) + + FilesEx.copyTo(temp_pf.toPath(), "${params.outdir}/pipeline_info/params_${timestamp}.json") + temp_pf.delete() } // diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 538ea8a4..945c19fb 
100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -23,7 +23,7 @@ class WorkflowMain { // // Validate parameters and print summary to screen // - public static void initialise(workflow, params, log) { + public static void initialise(workflow, params, log, args) { // Print workflow version and exit on --version if (params.version) { @@ -34,6 +34,8 @@ class WorkflowMain { // Check that a -profile or Nextflow config has been provided to run the pipeline NfcoreTemplate.checkConfigProvided(workflow, log) + // Check that the profile doesn't contain spaces and doesn't end with a trailing comma + checkProfile(workflow.profile, args, log) // Check that conda channels are set-up correctly if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) { @@ -59,4 +61,16 @@ } return null } + + // + // Exit the pipeline if -profile ends with a trailing comma, and warn on positional arguments + // + private static void checkProfile(profile, args, log) { + if (profile.endsWith(',')) { + Nextflow.error "Profile cannot end with a trailing comma. Please remove the comma from the end of the profile string.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead, e.g., `-profile docker,test`." + } + if (args[0]) { + log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${args[0]}` has been detected.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead, e.g., `-profile docker,test`." + } + } } diff --git a/lib/nfcore_external_java_deps.jar b/lib/nfcore_external_java_deps.jar deleted file mode 100644 index 805c8bb5..00000000 Binary files a/lib/nfcore_external_java_deps.jar and /dev/null differ diff --git a/main.nf b/main.nf index 24de2277..301c6845 100644 --- a/main.nf +++ b/main.nf @@ -33,7 +33,7 @@ if (params.validate_params) { validateParameters() } -WorkflowMain.initialise(workflow, params, log) +WorkflowMain.initialise(workflow, params, log, args) /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/modules.json b/modules.json index f8170ba4..04cd992c 100644 --- a/modules.json +++ b/modules.json @@ -17,7 +17,7 @@ }, "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "911696ea0b62df80e900ef244d7867d177971f73", + "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93", "installed_by": ["modules"] }, "fastp": { @@ -27,12 +27,12 @@ }, "fastqc": { "branch": "master", - "git_sha": "cfd937a668919d948f6fcbf4218e79de50c2f36f", + "git_sha": "c9488585ce7bd35ccd2a30faa2371454c8112fb9", "installed_by": ["modules"] }, "multiqc": { "branch": "master", - "git_sha": "911696ea0b62df80e900ef244d7867d177971f73", + "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93", "installed_by": ["modules"] } } diff --git a/modules/local/airrflow_report/airrflow_report.nf b/modules/local/airrflow_report/airrflow_report.nf index ecac2e49..fafbd052 100644 --- a/modules/local/airrflow_report/airrflow_report.nf +++ b/modules/local/airrflow_report/airrflow_report.nf @@ -6,8 +6,8 @@ process AIRRFLOW_REPORT { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: tuple val(meta), path(tab) // sequence tsv table in AIRR format @@ -21,9 +21,6 @@ process AIRRFLOW_REPORT { path "versions.yml" , emit: versions path("repertoire_comparison"), emit: results_folder path("*.html"), emit: report_html - path(repertoire_report) - path(css) - path(logo) script: """ diff --git a/modules/local/enchantr/collapse_duplicates.nf b/modules/local/enchantr/collapse_duplicates.nf index af640cf5..ebec7209 100644 --- a/modules/local/enchantr/collapse_duplicates.nf +++ b/modules/local/enchantr/collapse_duplicates.nf @@ -8,8 +8,8 @@ process COLLAPSE_DUPLICATES { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: tuple val(meta), path(tabs) // tuple [val(meta), sequence tsv in AIRR format ] diff --git a/modules/local/enchantr/define_clones.nf b/modules/local/enchantr/define_clones.nf index e24c75d9..71deebc2 100644 --- a/modules/local/enchantr/define_clones.nf +++ b/modules/local/enchantr/define_clones.nf @@ -25,8 +25,8 @@ process DEFINE_CLONES { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: tuple val(meta), path(tabs) // meta, sequence tsv in AIRR format diff --git a/modules/local/enchantr/detect_contamination.nf b/modules/local/enchantr/detect_contamination.nf index 0267b81a..1ed10e8e 100644 --- a/modules/local/enchantr/detect_contamination.nf +++ b/modules/local/enchantr/detect_contamination.nf @@ -9,8 +9,8 @@ process DETECT_CONTAMINATION { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: path(tabs) diff --git a/modules/local/enchantr/dowser_lineages.nf b/modules/local/enchantr/dowser_lineages.nf index b9a8de8e..e50a9e07 100644 --- a/modules/local/enchantr/dowser_lineages.nf +++ b/modules/local/enchantr/dowser_lineages.nf @@ -25,8 +25,8 @@ process DOWSER_LINEAGES { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: tuple val(meta), path(tabs) diff --git a/modules/local/enchantr/find_threshold.nf b/modules/local/enchantr/find_threshold.nf index 08178111..89b1c3b8 100644 --- a/modules/local/enchantr/find_threshold.nf +++ b/modules/local/enchantr/find_threshold.nf @@ -25,8 +25,8 @@ process FIND_THRESHOLD { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: diff --git a/modules/local/enchantr/remove_chimeric.nf b/modules/local/enchantr/remove_chimeric.nf index 32522aa2..76f4e0b5 100644 --- a/modules/local/enchantr/remove_chimeric.nf +++ b/modules/local/enchantr/remove_chimeric.nf @@ -9,8 +9,8 @@ process REMOVE_CHIMERIC { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: diff --git a/modules/local/enchantr/report_file_size.nf b/modules/local/enchantr/report_file_size.nf index 804ebd61..ece9d93f 100644 --- a/modules/local/enchantr/report_file_size.nf +++ b/modules/local/enchantr/report_file_size.nf @@ -10,8 +10,8 @@ process REPORT_FILE_SIZE { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: path logs diff --git a/modules/local/enchantr/single_cell_qc.nf b/modules/local/enchantr/single_cell_qc.nf index 36733e4d..6b232155 100644 --- a/modules/local/enchantr/single_cell_qc.nf +++ b/modules/local/enchantr/single_cell_qc.nf @@ -24,8 +24,8 @@ process SINGLE_CELL_QC { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: path(tabs) diff --git a/modules/local/enchantr/validate_input.nf b/modules/local/enchantr/validate_input.nf index 224b391a..0dcd884e 100644 --- a/modules/local/enchantr/validate_input.nf +++ b/modules/local/enchantr/validate_input.nf @@ -10,8 +10,8 @@ process VALIDATE_INPUT { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: file samplesheet diff --git a/modules/local/reveal/add_meta_to_tab.nf b/modules/local/reveal/add_meta_to_tab.nf index 0423695b..67c930d6 100644 --- a/modules/local/reveal/add_meta_to_tab.nf +++ b/modules/local/reveal/add_meta_to_tab.nf @@ -7,8 +7,8 @@ process ADD_META_TO_TAB { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" cache 'deep' // Without 'deep' this process would run when using -resume diff --git a/modules/local/reveal/filter_junction_mod3.nf b/modules/local/reveal/filter_junction_mod3.nf index 75a06eac..c373ddbf 100644 --- a/modules/local/reveal/filter_junction_mod3.nf +++ b/modules/local/reveal/filter_junction_mod3.nf @@ -7,8 +7,8 @@ process FILTER_JUNCTION_MOD3 { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: tuple val(meta), path(tab) // sequence tsv in AIRR format diff --git a/modules/local/reveal/filter_quality.nf b/modules/local/reveal/filter_quality.nf index abd71995..46062cb9 100644 --- a/modules/local/reveal/filter_quality.nf +++ b/modules/local/reveal/filter_quality.nf @@ -7,8 +7,8 @@ process FILTER_QUALITY { error "nf-core/airrflow currently does not support Conda. Please use a container profile instead." } container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'docker.io/immcantation/airrflow:3.2.0': - 'docker.io/immcantation/airrflow:3.2.0' }" + 'docker.io/immcantation/airrflow:3.3.0': + 'docker.io/immcantation/airrflow:3.3.0' }" input: tuple val(meta), path(tab) // sequence tsv in AIRR format diff --git a/modules/nf-core/custom/dumpsoftwareversions/environment.yml b/modules/nf-core/custom/dumpsoftwareversions/environment.yml new file mode 100644 index 00000000..9b3272bc --- /dev/null +++ b/modules/nf-core/custom/dumpsoftwareversions/environment.yml @@ -0,0 +1,7 @@ +name: custom_dumpsoftwareversions +channels: + - conda-forge + - bioconda + - defaults +dependencies: + - bioconda::multiqc=1.19 diff --git a/modules/nf-core/custom/dumpsoftwareversions/main.nf b/modules/nf-core/custom/dumpsoftwareversions/main.nf index ebc87273..f2187611 100644 --- a/modules/nf-core/custom/dumpsoftwareversions/main.nf +++ b/modules/nf-core/custom/dumpsoftwareversions/main.nf @@ -2,10 +2,10 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { label 'process_single' // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container - conda "bioconda::multiqc=1.14" + conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.14--pyhdfd78af_0' : - 'biocontainers/multiqc:1.14--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' : + 'biocontainers/multiqc:1.19--pyhdfd78af_0' }" input: path versions diff --git a/modules/nf-core/custom/dumpsoftwareversions/meta.yml b/modules/nf-core/custom/dumpsoftwareversions/meta.yml index c32657de..5f15a5fd 100644 --- a/modules/nf-core/custom/dumpsoftwareversions/meta.yml +++ b/modules/nf-core/custom/dumpsoftwareversions/meta.yml @@ -1,4 +1,4 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json name: custom_dumpsoftwareversions description: Custom module used to dump software versions within the nf-core pipeline template keywords: @@ -16,7 +16,6 @@ input: type: file description: YML file containing software versions pattern: "*.yml" - output: - yml: type: file @@ -30,7 +29,9 @@ output: type: file description: File containing software versions pattern: "versions.yml" - authors: - "@drpatelh" - "@grst" +maintainers: + - "@drpatelh" + - "@grst" diff --git a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test new file mode 100644 index 00000000..b1e1630b --- /dev/null +++ b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test @@ -0,0 +1,43 @@ +nextflow_process { + + name "Test Process CUSTOM_DUMPSOFTWAREVERSIONS" + script "../main.nf" + process "CUSTOM_DUMPSOFTWAREVERSIONS" + tag "modules" + tag "modules_nfcore" + tag "custom" + tag "dumpsoftwareversions" + tag "custom/dumpsoftwareversions" + + test("Should run without failures") { + when { + process { + """ + def tool1_version = ''' + TOOL1: + tool1: 0.11.9 + '''.stripIndent() + + def tool2_version = ''' + TOOL2: + tool2: 1.9 + '''.stripIndent() + + input[0] = Channel.of(tool1_version, tool2_version).collectFile() + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot( + process.out.versions, + file(process.out.mqc_yml[0]).readLines()[0..10], + file(process.out.yml[0]).readLines()[0..7] + ).match() + } + ) + } + } +} diff --git a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap new file mode 100644 index 00000000..5f59a936 --- /dev/null +++ b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap @@ -0,0 +1,33 @@ +{ + "Should run without failures": { + "content": [ + [ + "versions.yml:md5,76d454d92244589d32455833f7c1ba6d" + ], + [ + "data: \"\\n\\n \\n \\n \\n \\n \\n \\n \\n\\", + " \\n\\n\\n \\n \\n\\", + " \\ \\n\\n\\n\\n \\n \\", + " \\ \\n \\n\\n\\n\\n\\", + " \\n\\n \\n \\n\\", + " \\ \\n\\n\\n\\n\\n\\n \\n\\", + " \\ \\n \\n\\n\\n\\n\\", + " \\n\\n \\n \\n\\" + ], + [ + "CUSTOM_DUMPSOFTWAREVERSIONS:", + " python: 3.11.7", + " yaml: 5.4.1", + "TOOL1:", + " tool1: 0.11.9", + "TOOL2:", + " tool2: '1.9'", + "Workflow:" + ] + ], + "timestamp": "2024-01-09T23:01:18.710682" + } +} \ No newline at end of file diff --git a/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml b/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml new file mode 100644 index 00000000..405aa24a --- /dev/null +++ b/modules/nf-core/custom/dumpsoftwareversions/tests/tags.yml @@ -0,0 +1,2 @@ +custom/dumpsoftwareversions: + - 
modules/nf-core/custom/dumpsoftwareversions/** diff --git a/modules/nf-core/fastqc/environment.yml b/modules/nf-core/fastqc/environment.yml new file mode 100644 index 00000000..1787b38a --- /dev/null +++ b/modules/nf-core/fastqc/environment.yml @@ -0,0 +1,7 @@ +name: fastqc +channels: + - conda-forge + - bioconda + - defaults +dependencies: + - bioconda::fastqc=0.12.1 diff --git a/modules/nf-core/fastqc/main.nf b/modules/nf-core/fastqc/main.nf index 67209f79..9e19a74c 100644 --- a/modules/nf-core/fastqc/main.nf +++ b/modules/nf-core/fastqc/main.nf @@ -2,7 +2,7 @@ process FASTQC { tag "$meta.id" label 'process_medium' - conda "bioconda::fastqc=0.12.1" + conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/fastqc:0.12.1--hdfd78af_0' : 'biocontainers/fastqc:0.12.1--hdfd78af_0' }" @@ -37,7 +37,7 @@ process FASTQC { cat <<-END_VERSIONS > versions.yml "${task.process}": - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + fastqc: \$( fastqc --version | sed '/FastQC v/!d; s/.*v//' ) END_VERSIONS """ @@ -49,7 +49,7 @@ process FASTQC { cat <<-END_VERSIONS > versions.yml "${task.process}": - fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + fastqc: \$( fastqc --version | sed '/FastQC v/!d; s/.*v//' ) END_VERSIONS """ } diff --git a/modules/nf-core/fastqc/tests/main.nf.test b/modules/nf-core/fastqc/tests/main.nf.test index 6437a144..1f21c664 100644 --- a/modules/nf-core/fastqc/tests/main.nf.test +++ b/modules/nf-core/fastqc/tests/main.nf.test @@ -3,24 +3,20 @@ nextflow_process { name "Test Process FASTQC" script "../main.nf" process "FASTQC" + tag "modules" tag "modules_nfcore" tag "fastqc" - test("Single-Read") { + test("sarscov2 single-end [fastq]") { when { - params { - outdir = "$outputDir" - } process { """ - input[0] = [ + input[0] = Channel.of([ [ id: 'test', single_end:true ], - [ - file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) - ] - ] + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) ] + ]) """ } } @@ -28,14 +24,189 @@ nextflow_process { then { assertAll ( { assert process.success }, + // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. // looks like this:
// <div id="header_filename">Mon 2 Oct 2023<br/>test.gz</div>
// https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 - { assert process.out.html.get(0).get(1) ==~ ".*/test_fastqc.html" }, - { assert path(process.out.html.get(0).get(1)).getText().contains("<html>") }, - { assert snapshot(process.out.versions).match("versions") }, - { assert process.out.zip.get(0).get(1) ==~ ".*/test_fastqc.zip" } + + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("<html>") }, + + { assert snapshot(process.out.versions).match("versions") } ) } } + + test("sarscov2 paired-end [fastq]") { + + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("<html>") }, + { assert path(process.out.html[0][1][1]).text.contains("<html>") }, + + { assert snapshot(process.out.versions).match("versions") } ) } } + + test("sarscov2 interleaved [fastq]") { + + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_interleaved.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("<html>") }, + + { assert snapshot(process.out.versions).match("versions") } ) } } + + test("sarscov2 paired-end [bam]") { + + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("<html>") }, + + { assert snapshot(process.out.versions).match("versions") } ) } } + + test("sarscov2 multiple [fastq]") { + + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~
".*/test_2_fastqc.html" }, + { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, + { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, + { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("<html>") }, + { assert path(process.out.html[0][1][1]).text.contains("<html>") }, + { assert path(process.out.html[0][1][2]).text.contains("<html>") }, + { assert path(process.out.html[0][1][3]).text.contains("<html>") }, + + { assert snapshot(process.out.versions).match("versions") } ) } } + + test("sarscov2 custom_prefix") { + + when { + process { + """ + input[0] = Channel.of([ + [ id:'mysample', single_end:true ], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + + { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("<html>") }, + + { assert snapshot(process.out.versions).match("versions") } ) } } + + test("sarscov2 single-end [fastq] - stub") { + + options "-stub" + + when { + process { + """ + input[0] = Channel.of([ + [ id: 'test', single_end:true ], + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out.html.collect { file(it[1]).getName() } + + process.out.zip.collect { file(it[1]).getName() } + + process.out.versions ).match() } + ) + } + } + } diff --git a/modules/nf-core/fastqc/tests/main.nf.test.snap b/modules/nf-core/fastqc/tests/main.nf.test.snap index 636a32ce..5d624bb8 100644 --- a/modules/nf-core/fastqc/tests/main.nf.test.snap +++ b/modules/nf-core/fastqc/tests/main.nf.test.snap @@ -1,10 +1,20 @@ { + "sarscov2 single-end [fastq] - stub": { + "content": [ + [ + "test.html", + "test.zip", + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ] + ], + "timestamp": "2024-01-17T18:40:57.254299" + }, "versions": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], - "timestamp": "2023-10-09T23:40:54+0000" + "timestamp": "2024-01-17T18:36:50.033627" } } \ No newline at end of file diff --git a/modules/nf-core/multiqc/environment.yml b/modules/nf-core/multiqc/environment.yml new file mode 100644 index 00000000..7625b752 --- /dev/null +++ b/modules/nf-core/multiqc/environment.yml @@ -0,0 +1,7 @@ +name: multiqc +channels: + - conda-forge + - bioconda + - defaults +dependencies: + - bioconda::multiqc=1.19 diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf index 1fc387be..1b9f7c43 100644 --- a/modules/nf-core/multiqc/main.nf +++ b/modules/nf-core/multiqc/main.nf @@ -1,10 +1,10 @@ process MULTIQC { label 'process_single' - conda "bioconda::multiqc=1.14" + conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/multiqc:1.14--pyhdfd78af_0' : - 'biocontainers/multiqc:1.14--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' : + 'biocontainers/multiqc:1.19--pyhdfd78af_0' }" input: path multiqc_files, stageAs: "?/*" @@ -25,12 +25,14 @@ process MULTIQC { def args = task.ext.args ?: '' def config = multiqc_config ? "--config $multiqc_config" : '' def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' + def logo = multiqc_logo ? /--cl-config 'custom_logo: "${multiqc_logo}"'/ : '' """ multiqc \\ --force \\ $args \\ $config \\ $extra_config \\ + $logo \\ . cat <<-END_VERSIONS > versions.yml @@ -41,7 +43,7 @@ process MULTIQC { stub: """ - touch multiqc_data + mkdir multiqc_data touch multiqc_plots touch multiqc_report.html diff --git a/modules/nf-core/multiqc/meta.yml b/modules/nf-core/multiqc/meta.yml index f93b5ee5..45a9bc35 100644 --- a/modules/nf-core/multiqc/meta.yml +++ b/modules/nf-core/multiqc/meta.yml @@ -1,5 +1,4 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json -name: MultiQC +name: multiqc description: Aggregate results from bioinformatics analyses across many samples into a single report keywords: - QC @@ -13,7 +12,6 @@ tools: homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ licence: ["GPL-3.0-or-later"] - input: - multiqc_files: type: file @@ -31,7 +29,6 @@ input: type: file description: Optional logo file for MultiQC pattern: "*.{png}" - output: - report: type: file @@ -54,3 +51,8 @@ authors: - "@bunop" - "@drpatelh" - "@jfy133" +maintainers: + - "@abhi18av" + - "@bunop" + - "@drpatelh" + - "@jfy133" diff --git a/modules/nf-core/multiqc/tests/main.nf.test b/modules/nf-core/multiqc/tests/main.nf.test new file mode 100644 index 00000000..d0438eda --- /dev/null +++ b/modules/nf-core/multiqc/tests/main.nf.test @@ -0,0 +1,83 @@ +nextflow_process { + + name "Test Process MULTIQC" + script "../main.nf" + process "MULTIQC" + tag "modules" + tag "modules_nfcore" + tag "multiqc" + + test("sarscov2 single-end [fastqc]") { + + when { + process { + """ + input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) + input[1] = [] + input[2] = [] + input[3] = [] + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert process.out.report[0] ==~ ".*/multiqc_report.html" }, + { assert process.out.data[0] ==~ ".*/multiqc_data" }, + { assert snapshot(process.out.versions).match("versions") } + ) + } + + } + + test("sarscov2 single-end [fastqc] [config]") { + + when { + process { + """ + input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) + input[1] = Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true)) + input[2] = [] + input[3] = [] + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert process.out.report[0] ==~ ".*/multiqc_report.html" }, + { assert process.out.data[0] ==~ ".*/multiqc_data" }, + { assert snapshot(process.out.versions).match("versions") } + ) + } + } + + test("sarscov2 single-end [fastqc] - stub") { + + options "-stub" + + when { + process { + """ + input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)]) + input[1] = [] + input[2] = [] + input[3] = [] + """ + } + } + + 
then { + assertAll( + { assert process.success }, + { assert snapshot(process.out.report.collect { file(it).getName() } + + process.out.data.collect { file(it).getName() } + + process.out.plots.collect { file(it).getName() } + + process.out.versions ).match() } + ) + } + + } +} diff --git a/modules/nf-core/multiqc/tests/main.nf.test.snap b/modules/nf-core/multiqc/tests/main.nf.test.snap new file mode 100644 index 00000000..d37e7304 --- /dev/null +++ b/modules/nf-core/multiqc/tests/main.nf.test.snap @@ -0,0 +1,21 @@ +{ + "versions": { + "content": [ + [ + "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d" + ] + ], + "timestamp": "2024-01-09T23:02:49.911994" + }, + "sarscov2 single-end [fastqc] - stub": { + "content": [ + [ + "multiqc_report.html", + "multiqc_data", + "multiqc_plots", + "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d" + ] + ], + "timestamp": "2024-01-09T23:03:14.524346" + } +} \ No newline at end of file diff --git a/modules/nf-core/multiqc/tests/tags.yml b/modules/nf-core/multiqc/tests/tags.yml new file mode 100644 index 00000000..bea6c0d3 --- /dev/null +++ b/modules/nf-core/multiqc/tests/tags.yml @@ -0,0 +1,2 @@ +multiqc: + - modules/nf-core/multiqc/** diff --git a/nextflow.config b/nextflow.config index 807ebbee..65d762f3 100644 --- a/nextflow.config +++ b/nextflow.config @@ -72,7 +72,7 @@ params { // bulk filtering options // ----------------------- remove_chimeric = false - detect_contamination = null + detect_contamination = false collapseby = 'sample_id' // ----------------------- @@ -103,8 +103,8 @@ params { // ----------------------- // References - igenomes_base = 's3://ngi-igenomes/igenomes' - igenomes_ignore = true + igenomes_base = 's3://ngi-igenomes/igenomes' + igenomes_ignore = true // MultiQC options skip_multiqc = false @@ -160,7 +160,7 @@ try { } // Load nf-core/airrflow custom profiles from different institutions. -// Warning: Uncomment only if a pipeline-specific instititutional config already exists on nf-core/configs! +// Warning: Uncomment only if a pipeline-specific institutional config already exists on nf-core/configs! 
// try { // includeConfig "${params.custom_config_base}/pipeline/airrflow.config" // } catch (Exception e) { @@ -171,6 +171,7 @@ profiles { dumpHashes = true process.beforeScript = 'echo $HOSTNAME' cleanup = false + nextflow.enable.configProcessNamesValidation = true } conda { conda.enabled = true @@ -179,6 +180,7 @@ profiles { podman.enabled = false shifter.enabled = false charliecloud.enabled = false + channels = ['conda-forge', 'bioconda', 'defaults'] apptainer.enabled = false } mamba { @@ -193,16 +195,16 @@ profiles { } docker { docker.enabled = true - docker.userEmulation = true conda.enabled = false singularity.enabled = false podman.enabled = false shifter.enabled = false charliecloud.enabled = false apptainer.enabled = false + docker.runOptions = '-u $(id -u):$(id -g)' } arm { - docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' + docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' } singularity { conda.enabled = false @@ -282,7 +284,7 @@ singularity.registry = 'quay.io' // Nextflow plugins plugins { - id 'nf-validation' // Validation of pipeline parameters and creation of an input channel from a sample sheet + id 'nf-validation@1.1.3' // Validation of pipeline parameters and creation of an input channel from a sample sheet } // Load igenomes.config if required @@ -307,6 +309,9 @@ env { // Capture exit codes from upstream processes when piping process.shell = ['/bin/bash', '-euo', 'pipefail'] +// Disable process selector warnings by default. Use debug profile to enable warnings. +nextflow.enable.configProcessNamesValidation = false + def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') timeline { enabled = true @@ -332,7 +337,7 @@ manifest { description = """B and T cell repertoire analysis pipeline with the Immcantation framework.""" mainScript = 'main.nf' nextflowVersion = '!>=23.04.0' - version = '3.2.1dev' + version = '3.2.1dev' doi = '10.5281/zenodo.2642009' } diff --git a/nextflow_schema.json b/nextflow_schema.json index e1808a42..fee240e9 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -45,7 +45,7 @@ }, "miairr": { "type": "string", - "default": "airrflow/assets/reveal/mapping_MiAIRR_BioSample_v1.3.1.tsv", + "default": "${projectDir}/assets/reveal/mapping_MiAIRR_BioSample_v1.3.1.tsv", "description": "Path to MiAIRR-BioSample mapping", "fa_icon": "fas fa-table" } @@ -167,7 +167,6 @@ }, "adapter_fasta": { "type": "string", - "default": "None", "fa_icon": "fas fa-file", "description": "Fasta file with adapter sequences to be trimmed." }, @@ -235,7 +234,7 @@ "type": "string", "default": "cut", "description": "Masking mode for the pRESTO MaskPrimer step. 
Available: cut, mask, trim, tag.", - "enum": ["cut", "mask", "trim", "tag"], + "enum": ["cut", "mask", "tag", "trim"], "help_text": "The primer masking modes will perform the following actions:\n\n* `cut`: remove both the primer region and the preceding sequence.\n* `mask`: replace the primer region with Ns and remove the preceding sequence.\n* `trim`: remove the region preceding the primer, but leave the primer region intact.\n* `tag`: leave the input sequence unmodified.", "fa_icon": "fas fa-mask" }, @@ -335,6 +334,16 @@ "default": "", "properties": { "clonal_threshold": { + "oneOf": [ + { + "type": "string", + "enum": ["auto"] + }, + { + "type": "number", + "minimum": 0 + } + ], "type": ["string", "number"], "default": "auto", "fa_icon": "fab fa-pagelines", diff --git a/pyproject.toml b/pyproject.toml index 0d62beb6..7d08e1c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,13 @@ -# Config file for Python. Mostly used to configure linting of bin/check_samplesheet.py with Black. +# Config file for Python. Mostly used to configure linting of bin/*.py with Ruff. # Should be kept the same as nf-core/tools to avoid fighting with template synchronisation. -[tool.black] +[tool.ruff] line-length = 120 -target_version = ["py37", "py38", "py39", "py310"] +target-version = "py38" +select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] +cache-dir = "~/.cache/ruff" -[tool.isort] -profile = "black" -known_first_party = ["nf_core"] -multi_line_output = 3 +[tool.ruff.isort] +known-first-party = ["nf_core"] + +[tool.ruff.per-file-ignores] +"__init__.py" = ["E402", "F401"] diff --git a/subworkflows/local/bulk_qc_and_filter.nf b/subworkflows/local/bulk_qc_and_filter.nf index 247a0341..34b082d1 100644 --- a/subworkflows/local/bulk_qc_and_filter.nf +++ b/subworkflows/local/bulk_qc_and_filter.nf @@ -23,7 +23,7 @@ workflow BULK_QC_AND_FILTER { ch_imgt.collect() ) ch_logs = ch_logs.mix(CHANGEO_CREATEGERMLINES.out.logs) - ch_versions = ch_versions.mix(CHANGEO_CREATEGERMLINES.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(CHANGEO_CREATEGERMLINES.out.versions) // Remove chimera REMOVE_CHIMERIC( @@ -31,7 +31,7 @@ workflow BULK_QC_AND_FILTER { ch_imgt.collect() ) ch_logs = ch_logs.mix(REMOVE_CHIMERIC.out.logs) - ch_versions = ch_versions.mix(REMOVE_CHIMERIC.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(REMOVE_CHIMERIC.out.versions) ch_bulk_chimeric_pass = REMOVE_CHIMERIC.out.tab @@ -51,14 +51,14 @@ workflow BULK_QC_AND_FILTER { .collect() ) ch_logs = ch_logs.mix(DETECT_CONTAMINATION.out.logs) - ch_versions = ch_versions.mix(DETECT_CONTAMINATION.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(DETECT_CONTAMINATION.out.versions) } COLLAPSE_DUPLICATES( ch_bulk_chimeric_pass ) - ch_versions = ch_versions.mix(COLLAPSE_DUPLICATES.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(COLLAPSE_DUPLICATES.out.versions) ch_logs = ch_logs.mix(COLLAPSE_DUPLICATES.out.logs) emit: diff --git a/subworkflows/local/presto_sans_umi.nf b/subworkflows/local/presto_sans_umi.nf index 59a2db57..dca5d1b2 100644 --- a/subworkflows/local/presto_sans_umi.nf +++ b/subworkflows/local/presto_sans_umi.nf @@ -33,25 +33,25 @@ workflow PRESTO_SANS_UMI { params.save_trimmed, save_merged ) - ch_versions = ch_versions.mix(FASTP.out.versions.ifEmpty([])) + ch_versions = ch_versions.mix(FASTP.out.versions) ch_gunzip = FASTP.out.reads.map{ meta,reads -> [meta, reads[0], reads[1]] } // gunzip fastq.gz to fastq GUNZIP_SANS_UMI ( ch_gunzip ) - ch_versions = 
ch_versions.mix(GUNZIP_SANS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(GUNZIP_SANS_UMI.out.versions) // Assemble read pairs PRESTO_ASSEMBLEPAIRS_SANS_UMI ( GUNZIP_SANS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_ASSEMBLEPAIRS_SANS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_ASSEMBLEPAIRS_SANS_UMI.out.versions) // Filter sequences by quality score PRESTO_FILTERSEQ_POSTASSEMBLY_SANS_UMI ( PRESTO_ASSEMBLEPAIRS_SANS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_FILTERSEQ_POSTASSEMBLY_SANS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_FILTERSEQ_POSTASSEMBLY_SANS_UMI.out.versions) // Mask primers PRESTO_MASKPRIMERS_POSTASSEMBLY_SANS_UMI ( @@ -59,37 +59,37 @@ workflow PRESTO_SANS_UMI { ch_cprimers.collect(), ch_vprimers.collect() ) - ch_versions = ch_versions.mix(PRESTO_MASKPRIMERS_POSTASSEMBLY_SANS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_MASKPRIMERS_POSTASSEMBLY_SANS_UMI.out.versions) // Generate QC stats after reads paired and filtered but before collapsed FASTQC_POSTASSEMBLY_SANS_UMI ( PRESTO_MASKPRIMERS_POSTASSEMBLY_SANS_UMI.out.reads ) - ch_versions = ch_versions.mix(FASTQC_POSTASSEMBLY_SANS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(FASTQC_POSTASSEMBLY_SANS_UMI.out.versions) // Annotate primers in C_PRIMER and V_PRIMER field PRESTO_PARSEHEADERS_PRIMERS_SANS_UMI ( PRESTO_MASKPRIMERS_POSTASSEMBLY_SANS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_PARSEHEADERS_PRIMERS_SANS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_PARSEHEADERS_PRIMERS_SANS_UMI.out.versions) // Annotate metadata on primer headers PRESTO_PARSEHEADERS_METADATA_SANS_UMI ( PRESTO_PARSEHEADERS_PRIMERS_SANS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_PARSEHEADERS_METADATA_SANS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_PARSEHEADERS_METADATA_SANS_UMI.out.versions) // Mark and count duplicate sequences (DUPCOUNT) PRESTO_COLLAPSESEQ_SANS_UMI ( PRESTO_PARSEHEADERS_METADATA_SANS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_COLLAPSESEQ_SANS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_COLLAPSESEQ_SANS_UMI.out.versions) // Filter out sequences with less than 2 representative duplicates PRESTO_SPLITSEQ_SANS_UMI ( PRESTO_COLLAPSESEQ_SANS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_SPLITSEQ_SANS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_SPLITSEQ_SANS_UMI.out.versions) emit: fasta = PRESTO_SPLITSEQ_SANS_UMI.out.fasta diff --git a/subworkflows/local/presto_umi.nf b/subworkflows/local/presto_umi.nf index 0c5b92aa..8d7d8713 100644 --- a/subworkflows/local/presto_umi.nf +++ b/subworkflows/local/presto_umi.nf @@ -48,7 +48,7 @@ workflow PRESTO_UMI { params.save_trimmed, save_merged ) - ch_versions = ch_versions.mix(FASTP.out.versions.ifEmpty([])) + ch_versions = ch_versions.mix(FASTP.out.versions) //ch for merge umi ch_meta_R1_R2 = FASTP.out.reads @@ -60,7 +60,7 @@ workflow PRESTO_UMI { MERGE_UMI ( ch_meta_R1_R2_index ) ch_gunzip = MERGE_UMI.out.reads - ch_versions = ch_versions.mix(MERGE_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(MERGE_UMI.out.versions) } else { @@ -73,7 +73,7 @@ workflow PRESTO_UMI { params.save_trimmed, save_merged ) - ch_versions = ch_versions.mix(FASTP.out.versions.ifEmpty([])) + ch_versions = ch_versions.mix(FASTP.out.versions) ch_rename_fastq_umi = FASTP.out.reads.map{ meta,reads -> [meta, reads[0], reads[1]] } @@ 
-84,11 +84,11 @@ workflow PRESTO_UMI { // gunzip fastq.gz to fastq GUNZIP_UMI ( ch_gunzip ) - ch_versions = ch_versions.mix(GUNZIP_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(GUNZIP_UMI.out.versions) // Filter sequences by quality score PRESTO_FILTERSEQ_UMI ( GUNZIP_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_FILTERSEQ_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_FILTERSEQ_UMI.out.versions) // Mask primers PRESTO_MASKPRIMERS_UMI ( @@ -96,13 +96,13 @@ workflow PRESTO_UMI { ch_cprimers.collect(), ch_vprimers.collect() ) - ch_versions = ch_versions.mix(PRESTO_MASKPRIMERS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_MASKPRIMERS_UMI.out.versions) // Pre-consensus pair PRESTO_PAIRSEQ_UMI ( PRESTO_MASKPRIMERS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_PAIRSEQ_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_PAIRSEQ_UMI.out.versions) if (params.cluster_sets) { @@ -110,13 +110,13 @@ workflow PRESTO_UMI { PRESTO_CLUSTERSETS_UMI ( PRESTO_PAIRSEQ_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_CLUSTERSETS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_CLUSTERSETS_UMI.out.versions) // Annotate cluster into barcode field PRESTO_PARSE_CLUSTER_UMI ( PRESTO_CLUSTERSETS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_PARSE_CLUSTER_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_PARSE_CLUSTER_UMI.out.versions) ch_for_buildconsensus = PRESTO_PARSE_CLUSTER_UMI.out.reads ch_clustersets_logs = PRESTO_CLUSTERSETS_UMI.out.logs.collect() @@ -129,55 +129,55 @@ workflow PRESTO_UMI { PRESTO_BUILDCONSENSUS_UMI ( ch_for_buildconsensus ) - ch_versions = ch_versions.mix(PRESTO_BUILDCONSENSUS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_BUILDCONSENSUS_UMI.out.versions) // Post-consensus pair PRESTO_POSTCONSENSUS_PAIRSEQ_UMI ( PRESTO_BUILDCONSENSUS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_POSTCONSENSUS_PAIRSEQ_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_POSTCONSENSUS_PAIRSEQ_UMI.out.versions) // Assemble read pairs PRESTO_ASSEMBLEPAIRS_UMI ( PRESTO_POSTCONSENSUS_PAIRSEQ_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_ASSEMBLEPAIRS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_ASSEMBLEPAIRS_UMI.out.versions) // Generate QC stats after reads paired and filtered but before collapsed FASTQC_POSTASSEMBLY_UMI ( PRESTO_ASSEMBLEPAIRS_UMI.out.reads ) - ch_versions = ch_versions.mix(FASTQC_POSTASSEMBLY_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(FASTQC_POSTASSEMBLY_UMI.out.versions) // Combine UMI duplicate count PRESTO_PARSEHEADERS_COLLAPSE_UMI ( PRESTO_ASSEMBLEPAIRS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_PARSEHEADERS_COLLAPSE_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_PARSEHEADERS_COLLAPSE_UMI.out.versions) // Annotate primers in C_PRIMER and V_PRIMER field PRESTO_PARSEHEADERS_PRIMERS_UMI ( PRESTO_PARSEHEADERS_COLLAPSE_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_PARSEHEADERS_PRIMERS_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_PARSEHEADERS_PRIMERS_UMI.out.versions) // Annotate metadata on primer headers PRESTO_PARSEHEADERS_METADATA_UMI ( PRESTO_PARSEHEADERS_PRIMERS_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_PARSEHEADERS_METADATA_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_PARSEHEADERS_METADATA_UMI.out.versions) // Mark and 
count duplicate sequences with different UMI barcodes (DUPCOUNT) PRESTO_COLLAPSESEQ_UMI ( PRESTO_PARSEHEADERS_METADATA_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_COLLAPSESEQ_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_COLLAPSESEQ_UMI.out.versions) // Filter out sequences with less than 2 representative duplicates with different UMIs PRESTO_SPLITSEQ_UMI ( PRESTO_COLLAPSESEQ_UMI.out.reads ) - ch_versions = ch_versions.mix(PRESTO_SPLITSEQ_UMI.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(PRESTO_SPLITSEQ_UMI.out.versions) emit: fasta = PRESTO_SPLITSEQ_UMI.out.fasta diff --git a/subworkflows/local/repertoire_analysis_reporting.nf b/subworkflows/local/repertoire_analysis_reporting.nf index 7b3e583f..0cd0f5e3 100644 --- a/subworkflows/local/repertoire_analysis_reporting.nf +++ b/subworkflows/local/repertoire_analysis_reporting.nf @@ -71,7 +71,7 @@ workflow REPERTOIRE_ANALYSIS_REPORTING { AIRRFLOW_REPORT( ch_repertoires, ch_parsed_logs.collect().ifEmpty([]), - REPORT_FILE_SIZE.out.table.ifEmpty([]), + REPORT_FILE_SIZE.out.table.collect().ifEmpty([]), ch_report_rmd, ch_report_css, ch_report_logo diff --git a/subworkflows/local/single_cell_qc_and_filtering.nf b/subworkflows/local/single_cell_qc_and_filtering.nf index 9de2701e..47cd520b 100644 --- a/subworkflows/local/single_cell_qc_and_filtering.nf +++ b/subworkflows/local/single_cell_qc_and_filtering.nf @@ -28,7 +28,7 @@ workflow SINGLE_CELL_QC_AND_FILTERING { .set{ch_repertoire_after_scqc_with_sampleid} ch_logs = ch_logs.mix(SINGLE_CELL_QC.out.logs) - ch_versions = ch_versions.mix(SINGLE_CELL_QC.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(SINGLE_CELL_QC.out.versions) ch_repertoire_after_scqc_withmeta = ch_onlymeta.join(ch_repertoire_after_scqc_with_sampleid) .map{ it -> [ it[1], it[2] ]} diff --git a/subworkflows/local/vdj_annotation.nf b/subworkflows/local/vdj_annotation.nf index d30375c6..c80d3503 100644 --- a/subworkflows/local/vdj_annotation.nf +++ b/subworkflows/local/vdj_annotation.nf @@ -31,7 +31,7 @@ workflow VDJ_ANNOTATION { .set { ch_igblast_zipped } UNZIP_IGBLAST( ch_igblast_zipped.collect() ) ch_igblast = UNZIP_IGBLAST.out.unzipped - ch_versions = ch_versions.mix(UNZIP_IGBLAST.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(UNZIP_IGBLAST.out.versions) } else { Channel.fromPath("${params.igblast_base}") .ifEmpty { error "IGBLAST DB not found: ${params.igblast_base}" } @@ -46,7 +46,7 @@ workflow VDJ_ANNOTATION { .set { ch_imgt_zipped } UNZIP_IMGT( ch_imgt_zipped.collect() ) ch_imgt = UNZIP_IMGT.out.unzipped - ch_versions = ch_versions.mix(UNZIP_IMGT.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(UNZIP_IMGT.out.versions) } else { Channel.fromPath("${params.imgtdb_base}") .ifEmpty { error "IMGT DB not found: ${params.imgtdb_base}" } @@ -58,7 +58,7 @@ workflow VDJ_ANNOTATION { FETCH_DATABASES() ch_igblast = FETCH_DATABASES.out.igblast ch_imgt = FETCH_DATABASES.out.imgt - ch_versions = ch_versions.mix(FETCH_DATABASES.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(FETCH_DATABASES.out.versions) } CHANGEO_ASSIGNGENES ( @@ -67,7 +67,7 @@ workflow VDJ_ANNOTATION { ) ch_logs = ch_logs.mix(CHANGEO_ASSIGNGENES.out.logs) - ch_versions = ch_versions.mix(CHANGEO_ASSIGNGENES.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(CHANGEO_ASSIGNGENES.out.versions) CHANGEO_MAKEDB ( CHANGEO_ASSIGNGENES.out.fasta, @@ -75,7 +75,7 @@ workflow VDJ_ANNOTATION { ch_imgt.collect() ) ch_logs = ch_logs.mix(CHANGEO_MAKEDB.out.logs) - ch_versions = 
ch_versions.mix(CHANGEO_MAKEDB.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(CHANGEO_MAKEDB.out.versions) ch_assigned_tab = CHANGEO_MAKEDB.out.tab ch_assignment_logs = CHANGEO_MAKEDB.out.logs @@ -88,25 +88,25 @@ workflow VDJ_ANNOTATION { ch_assigned_tab ) ch_logs = ch_logs.mix(FILTER_QUALITY.out.logs) - ch_versions = ch_versions.mix(FILTER_QUALITY.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(FILTER_QUALITY.out.versions) if (params.productive_only) { CHANGEO_PARSEDB_SPLIT ( FILTER_QUALITY.out.tab ) ch_logs = ch_logs.mix(CHANGEO_PARSEDB_SPLIT.out.logs) - ch_versions = ch_versions.mix(CHANGEO_PARSEDB_SPLIT.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(CHANGEO_PARSEDB_SPLIT.out.versions) // Apply filter: junction length multiple of 3 FILTER_JUNCTION_MOD3( CHANGEO_PARSEDB_SPLIT.out.tab ) ch_logs = ch_logs.mix(FILTER_JUNCTION_MOD3.out.logs) - ch_versions = ch_versions.mix(FILTER_JUNCTION_MOD3.out.versions.ifEmpty(null)) - ch_repertoire = FILTER_JUNCTION_MOD3.out.tab.ifEmpty(null) + ch_versions = ch_versions.mix(FILTER_JUNCTION_MOD3.out.versions) + ch_repertoire = FILTER_JUNCTION_MOD3.out.tab } else { - ch_repertoire = FILTER_QUALITY.out.tab.ifEmpty(null) + ch_repertoire = FILTER_QUALITY.out.tab } ADD_META_TO_TAB( @@ -114,7 +114,7 @@ workflow VDJ_ANNOTATION { ch_validated_samplesheet ) ch_logs = ch_logs.mix(ADD_META_TO_TAB.out.logs) - ch_versions = ch_versions.mix(ADD_META_TO_TAB.out.versions.ifEmpty(null)) + ch_versions = ch_versions.mix(ADD_META_TO_TAB.out.versions) emit: diff --git a/workflows/airrflow.nf b/workflows/airrflow.nf index 03a1ed61..35e9b165 100644 --- a/workflows/airrflow.nf +++ b/workflows/airrflow.nf @@ -162,14 +162,14 @@ workflow AIRRFLOW { params.collapseby, params.cloneby ) - ch_versions = ch_versions.mix( ASSEMBLED_INPUT_CHECK.out.versions.ifEmpty([]) ) + ch_versions = ch_versions.mix( ASSEMBLED_INPUT_CHECK.out.versions ) if (params.reassign) { CHANGEO_CONVERTDB_FASTA_FROM_AIRR( ASSEMBLED_INPUT_CHECK.out.ch_tsv ) ch_fasta_from_tsv = CHANGEO_CONVERTDB_FASTA_FROM_AIRR.out.fasta - ch_versions = ch_versions.mix(CHANGEO_CONVERTDB_FASTA_FROM_AIRR.out.versions.ifEmpty([])) + ch_versions = ch_versions.mix(CHANGEO_CONVERTDB_FASTA_FROM_AIRR.out.versions) ch_reassign_logs = ch_reassign_logs.mix(CHANGEO_CONVERTDB_FASTA_FROM_AIRR.out.logs) } else { ch_fasta_from_tsv = Channel.empty() @@ -199,7 +199,7 @@ workflow AIRRFLOW { ch_fasta, ch_validated_samplesheet.collect() ) - ch_versions = ch_versions.mix( VDJ_ANNOTATION.out.versions.ifEmpty([])) + ch_versions = ch_versions.mix( VDJ_ANNOTATION.out.versions ) // Split bulk and single cell repertoires ch_repertoire_by_processing = VDJ_ANNOTATION.out.repertoire @@ -216,7 +216,7 @@ workflow AIRRFLOW { ch_repertoire_by_processing.bulk, VDJ_ANNOTATION.out.imgt.collect() ) - ch_versions = ch_versions.mix( BULK_QC_AND_FILTER.out.versions.ifEmpty([])) + ch_versions = ch_versions.mix( BULK_QC_AND_FILTER.out.versions.ifEmpty(null) ) ch_bulk_filtered = BULK_QC_AND_FILTER.out.repertoires @@ -227,7 +227,7 @@ workflow AIRRFLOW { SINGLE_CELL_QC_AND_FILTERING( ch_repertoire_by_processing.single ) - ch_versions = ch_versions.mix( SINGLE_CELL_QC_AND_FILTERING.out.versions.ifEmpty([]) ) + ch_versions = ch_versions.mix( SINGLE_CELL_QC_AND_FILTERING.out.versions.ifEmpty(null) ) // Mixing bulk and single cell channels for clonal analysis ch_repertoires_for_clones = ch_bulk_filtered @@ -240,7 +240,7 @@ workflow AIRRFLOW { VDJ_ANNOTATION.out.imgt.collect(), ch_report_logo_img.collect().ifEmpty([]) ) - ch_versions = 
ch_versions.mix( CLONAL_ANALYSIS.out.versions.ifEmpty([])) + ch_versions = ch_versions.mix( CLONAL_ANALYSIS.out.versions) if (!params.skip_report){ REPERTOIRE_ANALYSIS_REPORTING( @@ -288,8 +288,8 @@ workflow AIRRFLOW { ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) - ch_multiqc_files = ch_multiqc_files.mix(ch_fastp_html.ifEmpty([])) - ch_multiqc_files = ch_multiqc_files.mix(ch_fastp_json.ifEmpty([])) + ch_multiqc_files = ch_multiqc_files.mix(ch_fastp_html.collect().ifEmpty([])) + ch_multiqc_files = ch_multiqc_files.mix(ch_fastp_json.collect().ifEmpty([])) ch_multiqc_files = ch_multiqc_files.mix(ch_fastqc_postassembly_mqc.collect{it[1]}.ifEmpty([])) MULTIQC ( @@ -321,6 +321,13 @@ workflow.onComplete { } } +workflow.onError { + if (workflow.errorReport.contains("Process requirement exceeds available memory")) { + println("🛑 Default resources exceed availability 🛑 ") + println("💡 See here on how to configure pipeline: https://nf-co.re/docs/usage/configuration#tuning-workflow-resources 💡") + } +} + /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ THE END
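Editor's note on the `ifEmpty` changes in the subworkflow and workflow hunks above: the diff consistently drops `.ifEmpty(null)` / `.ifEmpty([])` when mixing per-process `versions` channels. A minimal sketch of the rationale, using hypothetical placeholder processes `FOO` and `BAR` (not airrflow code): `ifEmpty(null)` turns an empty channel into one that emits a literal `null` item, which then flows into downstream operators, whereas mixing a genuinely empty channel is simply a no-op.

// Hedged sketch — FOO and BAR are hypothetical processes, not part of this pipeline.
ch_versions = Channel.empty()

// Mixing an empty output channel adds nothing, so no guard is needed:
ch_versions = ch_versions.mix( FOO.out.versions )
ch_versions = ch_versions.mix( BAR.out.versions )

// The old pattern could inject a literal null into the channel, which
// downstream operators then receive as a value:
// ch_versions = ch_versions.mix( FOO.out.versions.ifEmpty(null) )

// Typical downstream consumption, as in the nf-core template:
CUSTOM_DUMPSOFTWAREVERSIONS (
    ch_versions.unique().collectFile(name: 'collated_versions.yml')
)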
[Unrecoverable diff residue: stripped HTML of a software-versions table from a test snapshot. Surviving content — header "Process Name / Software Version"; rows: CUSTOM_DUMPSOFTWAREVERSIONS (python 3.11.7; yaml 5.4.1), TOOL1 (tool1 0.11.9), TOOL2 (tool2 1.9), Workflow (Nextflow).]
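A small aside on the MULTIQC hunk above: the new `logo` argument is built with a Groovy slashy string (`/.../`), which interpolates `${...}` like a GString but needs no backslash escaping for the nested single and double quotes that the `--cl-config` value requires. A stand-alone illustration, runnable with plain `groovy` (the logo path is a made-up example value):

// Stand-alone Groovy sketch; 'assets/logo.png' is a hypothetical path.
def multiqc_logo = 'assets/logo.png'

// Slashy string: nested quotes need no escaping, ${} still interpolates.
def logo = multiqc_logo ? /--cl-config 'custom_logo: "${multiqc_logo}"'/ : ''

// The equivalent double-quoted GString needs backslash escapes:
def logo_escaped = multiqc_logo ? "--cl-config 'custom_logo: \"${multiqc_logo}\"'" : ''

assert logo == logo_escaped
println logo   // prints: --cl-config 'custom_logo: "assets/logo.png"'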