From 56f014799cf0b2a02dee0ddc3e82a1e68d8aec21 Mon Sep 17 00:00:00 2001 From: kevinmenden Date: Tue, 13 Jul 2021 10:53:20 +0200 Subject: [PATCH 001/227] Template update for nf-core/tools version 2.0 --- .editorconfig | 24 + .github/CONTRIBUTING.md | 2 +- .github/ISSUE_TEMPLATE/bug_report.md | 5 +- .github/PULL_REQUEST_TEMPLATE.md | 3 +- .github/workflows/awsfulltest.yml | 52 +- .github/workflows/awstest.yml | 50 +- .github/workflows/ci.yml | 19 +- .github/workflows/linting.yml | 21 +- .github/workflows/push_dockerhub_dev.yml | 28 - .github/workflows/push_dockerhub_release.yml | 29 - .gitignore | 1 - .github/markdownlint.yml => .markdownlint.yml | 2 + CITATIONS.md | 32 + Dockerfile | 13 - README.md | 71 +- assets/samplesheet.csv | 3 + assets/schema_input.json | 39 + assets/sendmail_template.txt | 36 +- bin/check_samplesheet.py | 146 +++ bin/markdown_to_html.py | 91 -- bin/scrape_software_versions.py | 44 +- conf/base.config | 94 +- conf/igenomes.config | 843 +++++++++--------- conf/modules.config | 32 + conf/test.config | 47 +- conf/test_full.config | 38 +- docs/README.md | 4 +- docs/images/mqc_fastqc_adapter.png | Bin 0 -> 23458 bytes docs/images/mqc_fastqc_counts.png | Bin 0 -> 33918 bytes docs/images/mqc_fastqc_quality.png | Bin 0 -> 55769 bytes docs/output.md | 63 +- docs/usage.md | 214 ++++- environment.yml | 15 - lib/Headers.groovy | 43 - lib/NfcoreSchema.groovy | 528 +++++------ lib/NfcoreTemplate.groovy | 266 ++++++ lib/Utils.groovy | 47 + lib/WorkflowMain.groovy | 94 ++ lib/WorkflowMhcquant.groovy | 59 ++ main.nf | 405 +-------- modules.json | 14 + modules/local/functions.nf | 68 ++ modules/local/get_software_versions.nf | 33 + modules/local/samplesheet_check.nf | 31 + modules/nf-core/modules/fastqc/functions.nf | 68 ++ modules/nf-core/modules/fastqc/main.nf | 47 + modules/nf-core/modules/fastqc/meta.yml | 51 ++ modules/nf-core/modules/multiqc/functions.nf | 68 ++ modules/nf-core/modules/multiqc/main.nf | 35 + modules/nf-core/modules/multiqc/meta.yml | 39 + 
nextflow.config | 300 ++++--- nextflow_schema.json | 255 +++--- subworkflows/local/input_check.nf | 42 + workflows/mhcquant.nf | 141 +++ 54 files changed, 2811 insertions(+), 1884 deletions(-) create mode 100644 .editorconfig delete mode 100644 .github/workflows/push_dockerhub_dev.yml delete mode 100644 .github/workflows/push_dockerhub_release.yml rename .github/markdownlint.yml => .markdownlint.yml (90%) create mode 100644 CITATIONS.md delete mode 100644 Dockerfile create mode 100644 assets/samplesheet.csv create mode 100644 assets/schema_input.json create mode 100755 bin/check_samplesheet.py delete mode 100755 bin/markdown_to_html.py create mode 100644 conf/modules.config create mode 100755 docs/images/mqc_fastqc_adapter.png create mode 100755 docs/images/mqc_fastqc_counts.png create mode 100755 docs/images/mqc_fastqc_quality.png delete mode 100644 environment.yml delete mode 100644 lib/Headers.groovy mode change 100644 => 100755 lib/NfcoreSchema.groovy create mode 100755 lib/NfcoreTemplate.groovy create mode 100755 lib/Utils.groovy create mode 100755 lib/WorkflowMain.groovy create mode 100755 lib/WorkflowMhcquant.groovy create mode 100644 modules.json create mode 100644 modules/local/functions.nf create mode 100644 modules/local/get_software_versions.nf create mode 100644 modules/local/samplesheet_check.nf create mode 100644 modules/nf-core/modules/fastqc/functions.nf create mode 100644 modules/nf-core/modules/fastqc/main.nf create mode 100644 modules/nf-core/modules/fastqc/meta.yml create mode 100644 modules/nf-core/modules/multiqc/functions.nf create mode 100644 modules/nf-core/modules/multiqc/main.nf create mode 100644 modules/nf-core/modules/multiqc/meta.yml create mode 100644 subworkflows/local/input_check.nf create mode 100644 workflows/mhcquant.nf diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..afb20bb1 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,24 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf 
+insert_final_newline = true +trim_trailing_whitespace = true +indent_size = 4 +indent_style = space + +[*.{yml,yaml}] +indent_size = 2 + +# These files are edited and tested upstream in nf-core/modules +[/modules/nf-core/**] +charset = unset +end_of_line = unset +insert_final_newline = unset +trim_trailing_whitespace = unset +indent_style = unset +indent_size = unset + +[/assets/email*] +indent_size = unset diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index bc70bebd..aaa81423 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -74,7 +74,7 @@ If you wish to contribute a new step, please use the following coding standards: 7. Add sanity checks for all relevant parameters. 8. Add any new software to the `scrape_software_versions.py` script in `bin/` and the version command to the `scrape_software_versions` process in `main.nf`. 9. Do local tests that the new code works properly and as expected. -10. Add a new test command in `.github/workflow/ci.yaml`. +10. Add a new test command in `.github/workflow/ci.yml`. 11. If applicable add a [MultiQC](https://https://multiqc.info/) module. 12. Update MultiQC config `assets/multiqc_config.yaml` so relevant suffixes, name clean up, General Statistics Table column order, and module figures are in the right order. 13. Optional: Add any descriptions of MultiQC report sections and output files to `docs/output.md`. 
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 380f093b..ea229fab 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -18,7 +18,7 @@ Please delete this text and anything that's not relevant from the template below I have checked the following places for your error: - [ ] [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) -- [ ] [nf-core/mhcquant pipeline documentation](https://nf-co.re/nf-core/mhcquant/usage) +- [ ] [nf-core/mhcquant pipeline documentation](https://nf-co.re/mhcquant/usage) ## Description of the bug @@ -51,13 +51,12 @@ Have you provided the following extra information/files: ## Nextflow Installation -- Version: +- Version: ## Container engine - Engine: - version: -- Image tag: ## Additional context diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 98bba18e..8c2143be 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -16,8 +16,7 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/nf-core/mhcq - [ ] This comment contains a description of changes (with reason). - [ ] If you've fixed a bug or added code that should be tested, add tests! - - [ ] If you've added a new tool - add to the software_versions process and a regex to `scrape_software_versions.py` - - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](nf-core/mhcquant/tree/master/.github/CONTRIBUTING.md) + - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/mhcquant/tree/master/.github/CONTRIBUTING.md) - [ ] If necessary, also make a PR on the nf-core/mhcquant _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. - [ ] Make sure your code lints (`nf-core lint .`). - [ ] Ensure the test suite passes (`nextflow run . 
-profile test,docker`). diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml index 5edd4642..bca184b7 100644 --- a/.github/workflows/awsfulltest.yml +++ b/.github/workflows/awsfulltest.yml @@ -1,46 +1,34 @@ name: nf-core AWS full size tests # This workflow is triggered on published releases. -# It can be additionally triggered manually with GitHub actions workflow dispatch. +# It can be additionally triggered manually with GitHub actions workflow dispatch button. # It runs the -profile 'test_full' on AWS batch on: - workflow_run: - workflows: ["nf-core Docker push (release)"] - types: [completed] + release: + types: [published] workflow_dispatch: - - -env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - TOWER_ACCESS_TOKEN: ${{ secrets.AWS_TOWER_TOKEN }} - AWS_JOB_DEFINITION: ${{ secrets.AWS_JOB_DEFINITION }} - AWS_JOB_QUEUE: ${{ secrets.AWS_JOB_QUEUE }} - AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }} - - jobs: - run-awstest: + run-tower: name: Run AWS full tests if: github.repository == 'nf-core/mhcquant' runs-on: ubuntu-latest steps: - - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2 - with: - auto-update-conda: true - python-version: 3.7 - - name: Install awscli - run: conda install -c conda-forge awscli - - name: Start AWS batch job + - name: Launch workflow via tower + uses: nf-core/tower-action@master # TODO nf-core: You can customise AWS full pipeline tests as required # Add full size test data (but still relatively small datasets for few samples) # on the `test_full.config` test runs with only one set of parameters - # Then specify `-profile test_full` instead of `-profile test` on the AWS batch command - run: | - aws batch submit-job \ - --region eu-west-1 \ - --job-name nf-core-mhcquant \ - --job-queue $AWS_JOB_QUEUE \ - --job-definition $AWS_JOB_DEFINITION \ - --container-overrides '{"command": ["nf-core/mhcquant", "-r '"${GITHUB_SHA}"' 
-profile test --outdir s3://'"${AWS_S3_BUCKET}"'/mhcquant/results-'"${GITHUB_SHA}"' -w s3://'"${AWS_S3_BUCKET}"'/mhcquant/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}' + + with: + workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} + bearer_token: ${{ secrets.TOWER_BEARER_TOKEN }} + compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} + pipeline: ${{ github.repository }} + revision: ${{ github.sha }} + workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/mhcquant/work-${{ github.sha }} + parameters: | + { + "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/mhcquant/results-${{ github.sha }}" + } + profiles: '[ "test_full", "aws_tower" ]' + diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml index e09449a2..285b9d29 100644 --- a/.github/workflows/awstest.yml +++ b/.github/workflows/awstest.yml @@ -1,42 +1,28 @@ name: nf-core AWS test -# This workflow is triggered on push to the master branch. -# It can be additionally triggered manually with GitHub actions workflow dispatch. -# It runs the -profile 'test' on AWS batch. +# This workflow can be triggered manually with the GitHub actions workflow dispatch button. 
+# It runs the -profile 'test' on AWS batch on: workflow_dispatch: - - -env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - TOWER_ACCESS_TOKEN: ${{ secrets.AWS_TOWER_TOKEN }} - AWS_JOB_DEFINITION: ${{ secrets.AWS_JOB_DEFINITION }} - AWS_JOB_QUEUE: ${{ secrets.AWS_JOB_QUEUE }} - AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }} - - jobs: - run-awstest: + run-tower: name: Run AWS tests if: github.repository == 'nf-core/mhcquant' runs-on: ubuntu-latest steps: - - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2 + - name: Launch workflow via tower + uses: nf-core/tower-action@master + with: - auto-update-conda: true - python-version: 3.7 - - name: Install awscli - run: conda install -c conda-forge awscli - - name: Start AWS batch job - # TODO nf-core: You can customise CI pipeline run tests as required - # For example: adding multiple test runs with different parameters - # Remember that you can parallelise this by using strategy.matrix - run: | - aws batch submit-job \ - --region eu-west-1 \ - --job-name nf-core-mhcquant \ - --job-queue $AWS_JOB_QUEUE \ - --job-definition $AWS_JOB_DEFINITION \ - --container-overrides '{"command": ["nf-core/mhcquant", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://'"${AWS_S3_BUCKET}"'/mhcquant/results-'"${GITHUB_SHA}"' -w s3://'"${AWS_S3_BUCKET}"'/mhcquant/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}' + workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} + bearer_token: ${{ secrets.TOWER_BEARER_TOKEN }} + compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} + pipeline: ${{ github.repository }} + revision: ${{ github.sha }} + workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/mhcquant/work-${{ github.sha }} + parameters: | + { + "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/mhcquant/results-${{ github.sha }}" + } + profiles: '[ "test", "aws_tower" ]' + diff --git a/.github/workflows/ci.yml 
b/.github/workflows/ci.yml index d35c8e66..8097523a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,28 +23,11 @@ jobs: strategy: matrix: # Nextflow versions: check pipeline minimum and current latest - nxf_ver: ['20.04.0', ''] + nxf_ver: ['21.04.0', ''] steps: - name: Check out pipeline code uses: actions/checkout@v2 - - name: Check if Dockerfile or Conda environment changed - uses: technote-space/get-diff-action@v4 - with: - FILES: | - Dockerfile - environment.yml - - - name: Build new docker image - if: env.MATCHED_FILES - run: docker build --no-cache . -t nfcore/mhcquant:dev - - - name: Pull docker image - if: ${{ !env.MATCHED_FILES }} - run: | - docker pull nfcore/mhcquant:dev - docker tag nfcore/mhcquant:dev nfcore/mhcquant:dev - - name: Install Nextflow env: CAPSULE_LOG: none diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index fcde400c..13b4fc81 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -18,7 +18,7 @@ jobs: - name: Install markdownlint run: npm install -g markdownlint-cli - name: Run Markdownlint - run: markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.github/markdownlint.yml + run: markdownlint . # If the above check failed, post a comment on the PR explaining the failure - name: Post PR comment @@ -35,8 +35,8 @@ jobs: * On Mac: `brew install markdownlint-cli` * Everything else: [Install `npm`](https://www.npmjs.com/get-npm) then [install `markdownlint-cli`](https://www.npmjs.com/package/markdownlint-cli) (`npm install -g markdownlint-cli`) * Fix the markdown errors - * Automatically: `markdownlint . --config .github/markdownlint.yml --fix` - * Manually resolve anything left from `markdownlint . --config .github/markdownlint.yml` + * Automatically: `markdownlint . 
--fix` + * Manually resolve anything left from `markdownlint .` Once you push these changes the test should pass, and you can hide this comment :+1: @@ -46,6 +46,20 @@ jobs: repo-token: ${{ secrets.GITHUB_TOKEN }} allow-repeats: false + EditorConfig: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - uses: actions/setup-node@v1 + with: + node-version: "10" + + - name: Install editorconfig-checker + run: npm install -g editorconfig-checker + + - name: Run ECLint check + run: editorconfig-checker -exclude README.md $(git ls-files | grep -v test) YAML: runs-on: ubuntu-latest @@ -84,7 +98,6 @@ jobs: repo-token: ${{ secrets.GITHUB_TOKEN }} allow-repeats: false - nf-core: runs-on: ubuntu-latest steps: diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml deleted file mode 100644 index 4238dfaf..00000000 --- a/.github/workflows/push_dockerhub_dev.yml +++ /dev/null @@ -1,28 +0,0 @@ -name: nf-core Docker push (dev) -# This builds the docker image and pushes it to DockerHub -# Runs on nf-core repo releases and push event to 'dev' branch (PR merges) -on: - push: - branches: - - dev - -jobs: - push_dockerhub: - name: Push new Docker image to Docker Hub (dev) - runs-on: ubuntu-latest - # Only run for the nf-core repo, for releases and merged PRs - if: ${{ github.repository == 'nf-core/mhcquant' }} - env: - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }} - steps: - - name: Check out pipeline code - uses: actions/checkout@v2 - - - name: Build new docker image - run: docker build --no-cache . 
-t nfcore/mhcquant:dev - - - name: Push Docker image to DockerHub (dev) - run: | - echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nfcore/mhcquant:dev diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml deleted file mode 100644 index 4ae08515..00000000 --- a/.github/workflows/push_dockerhub_release.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: nf-core Docker push (release) -# This builds the docker image and pushes it to DockerHub -# Runs on nf-core repo releases and push event to 'dev' branch (PR merges) -on: - release: - types: [published] - -jobs: - push_dockerhub: - name: Push new Docker image to Docker Hub (release) - runs-on: ubuntu-latest - # Only run for the nf-core repo, for releases and merged PRs - if: ${{ github.repository == 'nf-core/mhcquant' }} - env: - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKERHUB_PASS: ${{ secrets.DOCKERHUB_PASS }} - steps: - - name: Check out pipeline code - uses: actions/checkout@v2 - - - name: Build new docker image - run: docker build --no-cache . 
-t nfcore/mhcquant:latest - - - name: Push Docker image to DockerHub (release) - run: | - echo "$DOCKERHUB_PASS" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin - docker push nfcore/mhcquant:latest - docker tag nfcore/mhcquant:latest nfcore/mhcquant:${{ github.event.release.tag_name }} - docker push nfcore/mhcquant:${{ github.event.release.tag_name }} diff --git a/.gitignore b/.gitignore index aa4bb5b3..5124c9ac 100644 --- a/.gitignore +++ b/.gitignore @@ -3,7 +3,6 @@ work/ data/ results/ .DS_Store -tests/ testing/ testing* *.pyc diff --git a/.github/markdownlint.yml b/.markdownlint.yml similarity index 90% rename from .github/markdownlint.yml rename to .markdownlint.yml index 8d7eb53b..9e605fcf 100644 --- a/.github/markdownlint.yml +++ b/.markdownlint.yml @@ -1,6 +1,8 @@ # Markdownlint configuration file default: true line-length: false +ul-indent: + indent: 4 no-duplicate-header: siblings_only: true no-inline-html: diff --git a/CITATIONS.md b/CITATIONS.md new file mode 100644 index 00000000..71346781 --- /dev/null +++ b/CITATIONS.md @@ -0,0 +1,32 @@ +# nf-core/mhcquant: Citations + +## [nf-core](https://pubmed.ncbi.nlm.nih.gov/32055031/) + +> Ewels PA, Peltzer A, Fillinger S, Patel H, Alneberg J, Wilm A, Garcia MU, Di Tommaso P, Nahnsen S. The nf-core framework for community-curated bioinformatics pipelines. Nat Biotechnol. 2020 Mar;38(3):276-278. doi: 10.1038/s41587-020-0439-x. PubMed PMID: 32055031. + +## [Nextflow](https://pubmed.ncbi.nlm.nih.gov/28398311/) + +> Di Tommaso P, Chatzou M, Floden EW, Barja PP, Palumbo E, Notredame C. Nextflow enables reproducible computational workflows. Nat Biotechnol. 2017 Apr 11;35(4):316-319. doi: 10.1038/nbt.3820. PubMed PMID: 28398311. + +## Pipeline tools + +* [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) + +* [MultiQC](https://www.ncbi.nlm.nih.gov/pubmed/27312411/) + > Ewels P, Magnusson M, Lundin S, Käller M. 
MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. + +## Software packaging/containerisation tools + +* [Anaconda](https://anaconda.com) + > Anaconda Software Distribution. Computer software. Vers. 2-2.4.0. Anaconda, Nov. 2016. Web. + +* [Bioconda](https://pubmed.ncbi.nlm.nih.gov/29967506/) + > Grüning B, Dale R, Sjödin A, Chapman BA, Rowe J, Tomkins-Tinch CH, Valieris R, Köster J; Bioconda Team. Bioconda: sustainable and comprehensive software distribution for the life sciences. Nat Methods. 2018 Jul;15(7):475-476. doi: 10.1038/s41592-018-0046-7. PubMed PMID: 29967506. + +* [BioContainers](https://pubmed.ncbi.nlm.nih.gov/28379341/) + > da Veiga Leprevost F, Grüning B, Aflitos SA, Röst HL, Uszkoreit J, Barsnes H, Vaudel M, Moreno P, Gatto L, Weber J, Bai M, Jimenez RC, Sachsenberg T, Pfeuffer J, Alvarez RV, Griss J, Nesvizhskii AI, Perez-Riverol Y. BioContainers: an open-source and community-driven framework for software standardization. Bioinformatics. 2017 Aug 15;33(16):2580-2582. doi: 10.1093/bioinformatics/btx192. PubMed PMID: 28379341; PubMed Central PMCID: PMC5870671. + +* [Docker](https://dl.acm.org/doi/10.5555/2600239.2600241) + +* [Singularity](https://pubmed.ncbi.nlm.nih.gov/28494014/) + > Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS One. 2017 May 11;12(5):e0177459. doi: 10.1371/journal.pone.0177459. eCollection 2017. PubMed PMID: 28494014; PubMed Central PMCID: PMC5426675. 
diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 0fe57ab7..00000000 --- a/Dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -FROM nfcore/base:1.14 -LABEL authors="Leon Bichmann" \ - description="Docker image containing all software requirements for the nf-core/mhcquant pipeline" - -# Install the conda environment -COPY environment.yml / -RUN conda env create --quiet -f /environment.yml && conda clean -a - -# Add conda installation dir to PATH (instead of doing 'conda activate') -ENV PATH /opt/conda/envs/nf-core-mhcquant-1.6.1/bin:$PATH - -# Dump the details of the installed packages to a file for posterity -RUN conda env export --name nf-core-mhcquant-1.6.1 > nf-core-mhcquant-1.6.1.yml diff --git a/README.md b/README.md index 470985c0..205bfe0f 100644 --- a/README.md +++ b/README.md @@ -1,67 +1,69 @@ # ![nf-core/mhcquant](docs/images/nf-core-mhcquant_logo.png) -**Identify and quantify peptides from mass spectrometry raw data**. +[![GitHub Actions CI Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/mhcquant/actions?query=workflow%3A%22nf-core+CI%22) +[![GitHub Actions Linting Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/mhcquant/actions?query=workflow%3A%22nf-core+linting%22) +[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/mhcquant/results) +[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) -[![GitHub Actions CI Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/mhcquant/actions) -[![GitHub Actions Linting Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/mhcquant/actions) 
-[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A520.04.0-brightgreen.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.04.0-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) +[![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) +[![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) +[![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) -[![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/) -[![Docker](https://img.shields.io/docker/automated/nfcore/mhcquant.svg)](https://hub.docker.com/r/nfcore/mhcquant) -[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23mhcquant-4A154B?logo=slack)](https://nfcore.slack.com/channels/mhcquant) +[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23mhcquant-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/mhcquant) +[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core) +[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core) ## Introduction -**nf-core/mhcquant** is a bioinformatics best-practise analysis pipeline for +**nf-core/mhcquant** is a bioinformatics best-practice analysis pipeline for Identify and quantify peptides from mass spectrometry raw data. -The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It comes with docker containers making installation trivial and results highly reproducible. 
+The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It uses Docker/Singularity containers making installation trivial and results highly reproducible. The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementation of this pipeline uses one container per process which makes it much easier to maintain and update software dependencies. Where possible, these processes have been submitted to and installed from [nf-core/modules](https://github.com/nf-core/modules) in order to make them available to all nf-core pipelines, and to everyone within the Nextflow community! + + +On release, automated continuous integration tests run the pipeline on a full-sized dataset on the AWS cloud infrastructure. This ensures that the pipeline runs on AWS, has sensible resource allocation defaults set to run on real-world datasets, and permits the persistent storage of results to benchmark between pipeline releases and other analysis sources. The results obtained from the full-sized test can be viewed on the [nf-core website](https://nf-co.re/mhcquant/results). + +## Pipeline summary + + + +1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)) +2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)) ## Quick Start -1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>=20.04.0`) +1. Install [`Nextflow`](https://nf-co.re/usage/installation) (`>=21.04.0`) 2. 
Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_ 3. Download the pipeline and test it on a minimal dataset with a single command: - ```bash + ```console nextflow run nf-core/mhcquant -profile test, ``` - > Please check [nf-core/configs](https://github.com/nf-core/configs#documentation) to see if a custom config file to run nf-core pipelines already exists for your Institute. If so, you can simply use `-profile ` in your command. This will enable either `docker` or `singularity` and set the appropriate execution settings for your local compute environment. + > * Please check [nf-core/configs](https://github.com/nf-core/configs#documentation) to see if a custom config file to run nf-core pipelines already exists for your Institute. If so, you can simply use `-profile ` in your command. This will enable either `docker` or `singularity` and set the appropriate execution settings for your local compute environment. + > * If you are using `singularity` then the pipeline will auto-detect this and attempt to download the Singularity images directly as opposed to performing a conversion from Docker images. If you are persistently observing issues downloading Singularity images directly due to timeout or network issues then please use the `--singularity_pull_docker_container` parameter to pull and convert the Docker image instead. 
Alternatively, it is highly recommended to use the [`nf-core download`](https://nf-co.re/tools/#downloading-pipelines-for-offline-use) command to pre-download all of the required containers before running the pipeline and to set the [`NXF_SINGULARITY_CACHEDIR` or `singularity.cacheDir`](https://www.nextflow.io/docs/latest/singularity.html?#singularity-docker-hub) Nextflow options to be able to store and re-use the images from a central location for future pipeline runs. + > * If you are using `conda`, it is highly recommended to use the [`NXF_CONDA_CACHEDIR` or `conda.cacheDir`](https://www.nextflow.io/docs/latest/conda.html) settings to store the environments in a central location for future pipeline runs. 4. Start running your own analysis! - ```bash - nextflow run nf-core/mhcquant -profile --input '*_R{1,2}.fastq.gz' --genome GRCh37 + ```console + nextflow run nf-core/mhcquant -profile --input samplesheet.csv --genome GRCh37 ``` -See [usage docs](https://nf-co.re/mhcquant/usage) for all of the available options when running the pipeline. - -## Pipeline Summary - -By default, the pipeline currently performs the following: - - - -* Sequencing quality control (`FastQC`) -* Overall pipeline run summaries (`MultiQC`) - ## Documentation -The nf-core/mhcquant pipeline comes with documentation about the pipeline: [usage](https://nf-co.re/mhcquant/usage) and [output](https://nf-co.re/mhcquant/output). - - +The nf-core/mhcquant pipeline comes with documentation about the pipeline [usage](https://nf-co.re/mhcquant/usage), [parameters](https://nf-co.re/mhcquant/parameters) and [output](https://nf-co.re/mhcquant/output). ## Credits nf-core/mhcquant was originally written by Leon Bichmann. 
-We thank the following people for their extensive assistance in the development -of this pipeline: +We thank the following people for their extensive assistance in the development of this pipeline: @@ -73,9 +75,12 @@ For further information or help, don't hesitate to get in touch on the [Slack `# ## Citations - + + +An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file. + You can cite the `nf-core` publication as follows: > **The nf-core framework for community-curated bioinformatics pipelines.** @@ -83,7 +88,3 @@ You can cite the `nf-core` publication as follows: > Philip Ewels, Alexander Peltzer, Sven Fillinger, Harshil Patel, Johannes Alneberg, Andreas Wilm, Maxime Ulysse Garcia, Paolo Di Tommaso & Sven Nahnsen. > > _Nat Biotechnol._ 2020 Feb 13. doi: [10.1038/s41587-020-0439-x](https://dx.doi.org/10.1038/s41587-020-0439-x). - -In addition, references of tools and data used in this pipeline are as follows: - - diff --git a/assets/samplesheet.csv b/assets/samplesheet.csv new file mode 100644 index 00000000..5f653ab7 --- /dev/null +++ b/assets/samplesheet.csv @@ -0,0 +1,3 @@ +sample,fastq_1,fastq_2 +SAMPLE_PAIRED_END,/path/to/fastq/files/AEG588A1_S1_L002_R1_001.fastq.gz,/path/to/fastq/files/AEG588A1_S1_L002_R2_001.fastq.gz +SAMPLE_SINGLE_END,/path/to/fastq/files/AEG588A4_S4_L003_R1_001.fastq.gz, diff --git a/assets/schema_input.json b/assets/schema_input.json new file mode 100644 index 00000000..7f5dc79e --- /dev/null +++ b/assets/schema_input.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://raw.githubusercontent.com/nf-core/mhcquant/master/assets/schema_input.json", + "title": "nf-core/mhcquant pipeline - params.input schema", + "description": "Schema for the file provided with params.input", + "type": "array", + "items": { + "type": "object", + "properties": { + "sample": { + "type": "string", + "pattern": "^\\S+$", + "errorMessage": "Sample 
name must be provided and cannot contain spaces" + }, + "fastq_1": { + "type": "string", + "pattern": "^\\S+\\.f(ast)?q\\.gz$", + "errorMessage": "FastQ file for reads 1 must be provided, cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'" + }, + "fastq_2": { + "errorMessage": "FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'", + "anyOf": [ + { + "type": "string", + "pattern": "^\\S+\\.f(ast)?q\\.gz$" + }, + { + "type": "string", + "maxLength": 0 + } + ] + } + }, + "required": [ + "sample", + "fastq_1" + ] + } +} diff --git a/assets/sendmail_template.txt b/assets/sendmail_template.txt index e20bce6b..43aaddfb 100644 --- a/assets/sendmail_template.txt +++ b/assets/sendmail_template.txt @@ -15,15 +15,15 @@ Content-ID: Content-Disposition: inline; filename="nf-core-mhcquant_logo.png" <% out << new File("$projectDir/assets/nf-core-mhcquant_logo.png"). - bytes. - encodeBase64(). - toString(). - tokenize( '\n' )*. - toList()*. - collate( 76 )*. - collect { it.join() }. - flatten(). - join( '\n' ) %> + bytes. + encodeBase64(). + toString(). + tokenize( '\n' )*. + toList()*. + collate( 76 )*. + collect { it.join() }. + flatten(). + join( '\n' ) %> <% if (mqcFile){ @@ -37,15 +37,15 @@ Content-ID: Content-Disposition: attachment; filename=\"${mqcFileObj.getName()}\" ${mqcFileObj. - bytes. - encodeBase64(). - toString(). - tokenize( '\n' )*. - toList()*. - collate( 76 )*. - collect { it.join() }. - flatten(). - join( '\n' )} + bytes. + encodeBase64(). + toString(). + tokenize( '\n' )*. + toList()*. + collate( 76 )*. + collect { it.join() }. + flatten(). 
+ join( '\n' )} """ }} %> diff --git a/bin/check_samplesheet.py b/bin/check_samplesheet.py new file mode 100755 index 00000000..0b640769 --- /dev/null +++ b/bin/check_samplesheet.py @@ -0,0 +1,146 @@ +#!/usr/bin/env python + +# TODO nf-core: Update the script to check the samplesheet +# This script is based on the example at: https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv + +import os +import sys +import errno +import argparse + + +def parse_args(args=None): + Description = "Reformat nf-core/mhcquant samplesheet file and check its contents." + Epilog = "Example usage: python check_samplesheet.py " + + parser = argparse.ArgumentParser(description=Description, epilog=Epilog) + parser.add_argument("FILE_IN", help="Input samplesheet file.") + parser.add_argument("FILE_OUT", help="Output file.") + return parser.parse_args(args) + + +def make_dir(path): + if len(path) > 0: + try: + os.makedirs(path) + except OSError as exception: + if exception.errno != errno.EEXIST: + raise exception + + +def print_error(error, context="Line", context_str=""): + error_str = "ERROR: Please check samplesheet -> {}".format(error) + if context != "" and context_str != "": + error_str = "ERROR: Please check samplesheet -> {}\n{}: '{}'".format( + error, context.strip(), context_str.strip() + ) + print(error_str) + sys.exit(1) + + +# TODO nf-core: Update the check_samplesheet function +def check_samplesheet(file_in, file_out): + """ + This function checks that the samplesheet follows the following structure: + + sample,fastq_1,fastq_2 + SAMPLE_PE,SAMPLE_PE_RUN1_1.fastq.gz,SAMPLE_PE_RUN1_2.fastq.gz + SAMPLE_PE,SAMPLE_PE_RUN2_1.fastq.gz,SAMPLE_PE_RUN2_2.fastq.gz + SAMPLE_SE,SAMPLE_SE_RUN1_1.fastq.gz, + + For an example see: + https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv + """ + + sample_mapping_dict = {} + with open(file_in, "r") as fin: + + ## Check 
header + MIN_COLS = 2 + # TODO nf-core: Update the column names for the input samplesheet + HEADER = ["sample", "fastq_1", "fastq_2"] + header = [x.strip('"') for x in fin.readline().strip().split(",")] + if header[: len(HEADER)] != HEADER: + print("ERROR: Please check samplesheet header -> {} != {}".format(",".join(header), ",".join(HEADER))) + sys.exit(1) + + ## Check sample entries + for line in fin: + lspl = [x.strip().strip('"') for x in line.strip().split(",")] + + # Check valid number of columns per row + if len(lspl) < len(HEADER): + print_error( + "Invalid number of columns (minimum = {})!".format(len(HEADER)), + "Line", + line, + ) + num_cols = len([x for x in lspl if x]) + if num_cols < MIN_COLS: + print_error( + "Invalid number of populated columns (minimum = {})!".format(MIN_COLS), + "Line", + line, + ) + + ## Check sample name entries + sample, fastq_1, fastq_2 = lspl[: len(HEADER)] + sample = sample.replace(" ", "_") + if not sample: + print_error("Sample entry has not been specified!", "Line", line) + + ## Check FastQ file extension + for fastq in [fastq_1, fastq_2]: + if fastq: + if fastq.find(" ") != -1: + print_error("FastQ file contains spaces!", "Line", line) + if not fastq.endswith(".fastq.gz") and not fastq.endswith(".fq.gz"): + print_error( + "FastQ file does not have extension '.fastq.gz' or '.fq.gz'!", + "Line", + line, + ) + + ## Auto-detect paired-end/single-end + sample_info = [] ## [single_end, fastq_1, fastq_2] + if sample and fastq_1 and fastq_2: ## Paired-end short reads + sample_info = ["0", fastq_1, fastq_2] + elif sample and fastq_1 and not fastq_2: ## Single-end short reads + sample_info = ["1", fastq_1, fastq_2] + else: + print_error("Invalid combination of columns provided!", "Line", line) + + ## Create sample mapping dictionary = { sample: [ single_end, fastq_1, fastq_2 ] } + if sample not in sample_mapping_dict: + sample_mapping_dict[sample] = [sample_info] + else: + if sample_info in sample_mapping_dict[sample]: + 
print_error("Samplesheet contains duplicate rows!", "Line", line) + else: + sample_mapping_dict[sample].append(sample_info) + + ## Write validated samplesheet with appropriate columns + if len(sample_mapping_dict) > 0: + out_dir = os.path.dirname(file_out) + make_dir(out_dir) + with open(file_out, "w") as fout: + fout.write(",".join(["sample", "single_end", "fastq_1", "fastq_2"]) + "\n") + for sample in sorted(sample_mapping_dict.keys()): + + ## Check that multiple runs of the same sample are of the same datatype + if not all(x[0] == sample_mapping_dict[sample][0][0] for x in sample_mapping_dict[sample]): + print_error("Multiple runs of a sample must be of the same datatype!", "Sample: {}".format(sample)) + + for idx, val in enumerate(sample_mapping_dict[sample]): + fout.write(",".join(["{}_T{}".format(sample, idx + 1)] + val) + "\n") + else: + print_error("No entries to process!", "Samplesheet: {}".format(file_in)) + + +def main(args=None): + args = parse_args(args) + check_samplesheet(args.FILE_IN, args.FILE_OUT) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/bin/markdown_to_html.py b/bin/markdown_to_html.py deleted file mode 100755 index a26d1ff5..00000000 --- a/bin/markdown_to_html.py +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env python -from __future__ import print_function -import argparse -import markdown -import os -import sys -import io - - -def convert_markdown(in_fn): - input_md = io.open(in_fn, mode="r", encoding="utf-8").read() - html = markdown.markdown( - "[TOC]\n" + input_md, - extensions=["pymdownx.extra", "pymdownx.b64", "pymdownx.highlight", "pymdownx.emoji", "pymdownx.tilde", "toc"], - extension_configs={ - "pymdownx.b64": {"base_path": os.path.dirname(in_fn)}, - "pymdownx.highlight": {"noclasses": True}, - "toc": {"title": "Table of Contents"}, - }, - ) - return html - - -def wrap_html(contents): - header = """ - - - - - -
- """ - footer = """ -
- - - """ - return header + contents + footer - - -def parse_args(args=None): - parser = argparse.ArgumentParser() - parser.add_argument("mdfile", type=argparse.FileType("r"), nargs="?", help="File to convert. Defaults to stdin.") - parser.add_argument( - "-o", "--out", type=argparse.FileType("w"), default=sys.stdout, help="Output file name. Defaults to stdout." - ) - return parser.parse_args(args) - - -def main(args=None): - args = parse_args(args) - converted_md = convert_markdown(args.mdfile.name) - html = wrap_html(converted_md) - args.out.write(html) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/bin/scrape_software_versions.py b/bin/scrape_software_versions.py index 41042284..8a5bd9ed 100755 --- a/bin/scrape_software_versions.py +++ b/bin/scrape_software_versions.py @@ -1,36 +1,18 @@ #!/usr/bin/env python from __future__ import print_function -from collections import OrderedDict -import re +import os -# TODO nf-core: Add additional regexes for new tools in process get_software_versions -regexes = { - "nf-core/mhcquant": ["v_pipeline.txt", r"(\S+)"], - "Nextflow": ["v_nextflow.txt", r"(\S+)"], - "FastQC": ["v_fastqc.txt", r"FastQC v(\S+)"], - "MultiQC": ["v_multiqc.txt", r"multiqc, version (\S+)"], -} -results = OrderedDict() -results["nf-core/mhcquant"] = 'N/A' -results["Nextflow"] = 'N/A' -results["FastQC"] = 'N/A' -results["MultiQC"] = 'N/A' +results = {} +version_files = [x for x in os.listdir(".") if x.endswith(".version.txt")] +for version_file in version_files: -# Search each file using its regex -for k, v in regexes.items(): - try: - with open(v[0]) as x: - versions = x.read() - match = re.search(v[1], versions) - if match: - results[k] = "v{}".format(match.group(1)) - except IOError: - results[k] = False + software = version_file.replace(".version.txt", "") + if software == "pipeline": + software = "nf-core/mhcquant" -# Remove software set to false in results -for k in list(results): - if not results[k]: - del results[k] + with 
open(version_file) as fin: + version = fin.read().strip() + results[software] = version # Dump to YAML print( @@ -44,11 +26,11 @@
""" ) -for k, v in results.items(): +for k, v in sorted(results.items()): print("
{}
{}
".format(k, v)) print("
") # Write out regexes as csv file: -with open("software_versions.csv", "w") as f: - for k, v in results.items(): +with open("software_versions.tsv", "w") as f: + for k, v in sorted(results.items()): f.write("{}\t{}\n".format(k, v)) diff --git a/conf/base.config b/conf/base.config index d6d5e05d..d184fc65 100644 --- a/conf/base.config +++ b/conf/base.config @@ -1,51 +1,57 @@ /* - * ------------------------------------------------- - * nf-core/mhcquant Nextflow base config file - * ------------------------------------------------- - * A 'blank slate' config file, appropriate for general - * use on most high performace compute environments. - * Assumes that all software is installed and available - * on the PATH. Runs in `local` mode - all jobs will be - * run on the logged in environment. - */ +======================================================================================== + nf-core/mhcquant Nextflow base config file +======================================================================================== + A 'blank slate' config file, appropriate for general use on most high performance + compute environments. Assumes that all software is installed and available on + the PATH. Runs in `local` mode - all jobs will be run on the logged in environment. +---------------------------------------------------------------------------------------- +*/ process { - // TODO nf-core: Check the defaults for all processes - cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 7.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + // TODO nf-core: Check the defaults for all processes + cpus = { check_max( 1 * task.attempt, 'cpus' ) } + memory = { check_max( 6.GB * task.attempt, 'memory' ) } + time = { check_max( 4.h * task.attempt, 'time' ) } - errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 
'retry' : 'finish' } - maxRetries = 1 - maxErrors = '-1' - - // Process-specific resource requirements - // NOTE - Only one of the labels below are used in the fastqc process in the main script. - // If possible, it would be nice to keep the same label naming convention when - // adding in your processes. - // TODO nf-core: Customise requirements for specific processes. - // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors - withLabel:process_low { - cpus = { check_max( 2 * task.attempt, 'cpus' ) } - memory = { check_max( 14.GB * task.attempt, 'memory' ) } - time = { check_max( 6.h * task.attempt, 'time' ) } - } - withLabel:process_medium { - cpus = { check_max( 6 * task.attempt, 'cpus' ) } - memory = { check_max( 42.GB * task.attempt, 'memory' ) } - time = { check_max( 8.h * task.attempt, 'time' ) } - } - withLabel:process_high { - cpus = { check_max( 12 * task.attempt, 'cpus' ) } - memory = { check_max( 84.GB * task.attempt, 'memory' ) } - time = { check_max( 10.h * task.attempt, 'time' ) } - } - withLabel:process_long { - time = { check_max( 20.h * task.attempt, 'time' ) } - } - withName:get_software_versions { - cache = false - } + errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' } + maxRetries = 1 + maxErrors = '-1' + // Process-specific resource requirements + // NOTE - Please try and re-use the labels below as much as possible. + // These labels are used and recognised by default in DSL2 files hosted on nf-core/modules. + // If possible, it would be nice to keep the same label naming convention when + // adding in your local modules too. + // TODO nf-core: Customise requirements for specific processes. 
+ // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors + withLabel:process_low { + cpus = { check_max( 2 * task.attempt, 'cpus' ) } + memory = { check_max( 12.GB * task.attempt, 'memory' ) } + time = { check_max( 4.h * task.attempt, 'time' ) } + } + withLabel:process_medium { + cpus = { check_max( 6 * task.attempt, 'cpus' ) } + memory = { check_max( 36.GB * task.attempt, 'memory' ) } + time = { check_max( 8.h * task.attempt, 'time' ) } + } + withLabel:process_high { + cpus = { check_max( 12 * task.attempt, 'cpus' ) } + memory = { check_max( 72.GB * task.attempt, 'memory' ) } + time = { check_max( 16.h * task.attempt, 'time' ) } + } + withLabel:process_long { + time = { check_max( 20.h * task.attempt, 'time' ) } + } + withLabel:process_high_memory { + memory = { check_max( 200.GB * task.attempt, 'memory' ) } + } + withLabel:error_ignore { + errorStrategy = 'ignore' + } + withLabel:error_retry { + errorStrategy = 'retry' + maxRetries = 2 + } } diff --git a/conf/igenomes.config b/conf/igenomes.config index 31b7ee61..855948de 100644 --- a/conf/igenomes.config +++ b/conf/igenomes.config @@ -1,421 +1,432 @@ /* - * ------------------------------------------------- - * Nextflow config file for iGenomes paths - * ------------------------------------------------- - * Defines reference genomes, using iGenome paths - * Can be used by any config that customises the base - * path using $params.igenomes_base / --igenomes_base - */ +======================================================================================== + Nextflow config file for iGenomes paths +======================================================================================== + Defines reference genomes using iGenome paths. 
+ Can be used by any config that customises the base path using: + $params.igenomes_base / --igenomes_base +---------------------------------------------------------------------------------------- +*/ params { - // illumina iGenomes reference file paths - genomes { - 'GRCh37' { - fasta = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/README.txt" - mito_name = "MT" - macs_gsize = "2.7e9" - blacklist = "${projectDir}/assets/blacklists/GRCh37-blacklist.bed" + // illumina iGenomes reference file paths + genomes { + 'GRCh37' { + fasta = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/README.txt" + mito_name = "MT" + macs_gsize = "2.7e9" + blacklist = 
"${projectDir}/assets/blacklists/GRCh37-blacklist.bed" + } + 'GRCh38' { + fasta = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/Genes/genes.bed" + mito_name = "chrM" + macs_gsize = "2.7e9" + blacklist = "${projectDir}/assets/blacklists/hg38-blacklist.bed" + } + 'GRCm38' { + fasta = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/README.txt" + mito_name = "MT" + macs_gsize = "1.87e9" + blacklist = "${projectDir}/assets/blacklists/GRCm38-blacklist.bed" + } + 'TAIR10' { + fasta = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/Bowtie2Index/" + star = 
"${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/README.txt" + mito_name = "Mt" + } + 'EB2' { + fasta = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/README.txt" + } + 'UMD3.1' { + fasta = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/README.txt" + mito_name 
= "MT" + } + 'WBcel235' { + fasta = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.bed" + mito_name = "MtDNA" + macs_gsize = "9e7" + } + 'CanFam3.1' { + fasta = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/README.txt" + mito_name = "MT" + } + 'GRCz10' { + fasta = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/STARIndex/" + bismark = 
"${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.bed" + mito_name = "MT" + } + 'BDGP6' { + fasta = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.bed" + mito_name = "M" + macs_gsize = "1.2e8" + } + 'EquCab2' { + fasta = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/README.txt" + mito_name = "MT" + } + 'EB1' { + fasta = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/WholeGenomeFasta/genome.fa" + bwa = 
"${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/README.txt" + } + 'Galgal4' { + fasta = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.bed" + mito_name = "MT" + } + 'Gm01' { + fasta = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.gtf" + bed12 = 
"${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/README.txt" + } + 'Mmul_1' { + fasta = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/README.txt" + mito_name = "MT" + } + 'IRGSP-1.0' { + fasta = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.bed" + mito_name = "Mt" + } + 'CHIMP2.1.4' { + fasta = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BWAIndex/genome.fa" + bowtie2 = 
"${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/README.txt" + mito_name = "MT" + } + 'Rnor_5.0' { + fasta = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Annotation/Genes/genes.bed" + mito_name = "MT" + } + 'Rnor_6.0' { + fasta = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.bed" + 
mito_name = "MT" + } + 'R64-1-1' { + fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.bed" + mito_name = "MT" + macs_gsize = "1.2e7" + } + 'EF2' { + fasta = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/README.txt" + mito_name = "MT" + macs_gsize = "1.21e7" + } + 'Sbi1' { + fasta = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/Bowtie2Index/" + star = 
"${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/README.txt" + } + 'Sscrofa10.2' { + fasta = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/README.txt" + mito_name = "MT" + } + 'AGPv3' { + fasta = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.bed" + mito_name = "Mt" + } + 'hg38' { + fasta = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/WholeGenomeFasta/genome.fa" + bwa = 
"${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Annotation/Genes/genes.bed" + mito_name = "chrM" + macs_gsize = "2.7e9" + blacklist = "${projectDir}/assets/blacklists/hg38-blacklist.bed" + } + 'hg19' { + fasta = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/README.txt" + mito_name = "chrM" + macs_gsize = "2.7e9" + blacklist = "${projectDir}/assets/blacklists/hg19-blacklist.bed" + } + 'mm10' { + fasta = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/Genes/genes.gtf" + bed12 = 
"${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/README.txt" + mito_name = "chrM" + macs_gsize = "1.87e9" + blacklist = "${projectDir}/assets/blacklists/mm10-blacklist.bed" + } + 'bosTau8' { + fasta = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Annotation/Genes/genes.bed" + mito_name = "chrM" + } + 'ce10' { + fasta = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/README.txt" + mito_name = "chrM" + macs_gsize = "9e7" + } + 'canFam3' { + fasta = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/BWAIndex/genome.fa" + bowtie2 = 
"${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/README.txt" + mito_name = "chrM" + } + 'danRer10' { + fasta = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Annotation/Genes/genes.bed" + mito_name = "chrM" + macs_gsize = "1.37e9" + } + 'dm6' { + fasta = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Annotation/Genes/genes.bed" + mito_name = "chrM" + macs_gsize = "1.2e8" + } + 'equCab2' { + fasta = 
"${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/README.txt" + mito_name = "chrM" + } + 'galGal4' { + fasta = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/README.txt" + mito_name = "chrM" + } + 'panTro4' { + fasta = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/BismarkIndex/" + gtf = 
"${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/README.txt" + mito_name = "chrM" + } + 'rn6' { + fasta = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Annotation/Genes/genes.bed" + mito_name = "chrM" + } + 'sacCer3' { + fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/STARIndex/" + bismark = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/BismarkIndex/" + readme = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Annotation/README.txt" + mito_name = "chrM" + macs_gsize = "1.2e7" + } + 'susScr3' { + fasta = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/BWAIndex/genome.fa" + bowtie2 = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/Bowtie2Index/" + star = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/STARIndex/" + bismark = 
"${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/BismarkIndex/" + gtf = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/Genes/genes.gtf" + bed12 = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/Genes/genes.bed" + readme = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/README.txt" + mito_name = "chrM" + } } - 'GRCh38' { - fasta = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/Genes/genes.bed" - mito_name = "chrM" - macs_gsize = "2.7e9" - blacklist = "${projectDir}/assets/blacklists/hg38-blacklist.bed" - } - 'GRCm38' { - fasta = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/README.txt" - mito_name = "MT" - macs_gsize = "1.87e9" - blacklist = "${projectDir}/assets/blacklists/GRCm38-blacklist.bed" - } - 'TAIR10' { - fasta = 
"${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/README.txt" - mito_name = "Mt" - } - 'EB2' { - fasta = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/README.txt" - } - 'UMD3.1' { - fasta = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/STARIndex/" - bismark = 
"${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/README.txt" - mito_name = "MT" - } - 'WBcel235' { - fasta = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.bed" - mito_name = "MtDNA" - macs_gsize = "9e7" - } - 'CanFam3.1' { - fasta = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/README.txt" - mito_name = "MT" - } - 'GRCz10' { - fasta = 
"${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.bed" - mito_name = "MT" - } - 'BDGP6' { - fasta = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.bed" - mito_name = "M" - macs_gsize = "1.2e8" - } - 'EquCab2' { - fasta = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.gtf" - bed12 = 
"${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/README.txt" - mito_name = "MT" - } - 'EB1' { - fasta = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/README.txt" - } - 'Galgal4' { - fasta = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.bed" - mito_name = "MT" - } - 'Gm01' { - fasta = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/Bowtie2Index/" - star = 
"${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/README.txt" - } - 'Mmul_1' { - fasta = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/README.txt" - mito_name = "MT" - } - 'IRGSP-1.0' { - fasta = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.bed" - mito_name = "Mt" - } - 'CHIMP2.1.4' { - fasta = 
"${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/README.txt" - mito_name = "MT" - } - 'Rnor_6.0' { - fasta = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.bed" - mito_name = "MT" - } - 'R64-1-1' { - fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/STARIndex/" - bismark = 
"${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.bed" - mito_name = "MT" - macs_gsize = "1.2e7" - } - 'EF2' { - fasta = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/README.txt" - mito_name = "MT" - macs_gsize = "1.21e7" - } - 'Sbi1' { - fasta = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/README.txt" - } - 'Sscrofa10.2' { - fasta = 
"${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/README.txt" - mito_name = "MT" - } - 'AGPv3' { - fasta = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.bed" - mito_name = "Mt" - } - 'hg38' { - fasta = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Annotation/Genes/genes.bed" - 
mito_name = "chrM" - macs_gsize = "2.7e9" - blacklist = "${projectDir}/assets/blacklists/hg38-blacklist.bed" - } - 'hg19' { - fasta = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/README.txt" - mito_name = "chrM" - macs_gsize = "2.7e9" - blacklist = "${projectDir}/assets/blacklists/hg19-blacklist.bed" - } - 'mm10' { - fasta = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/README.txt" - mito_name = "chrM" - macs_gsize = "1.87e9" - blacklist = "${projectDir}/assets/blacklists/mm10-blacklist.bed" - } - 'bosTau8' { - fasta = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/BWAIndex/genome.fa" - bowtie2 = 
"${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Annotation/Genes/genes.bed" - mito_name = "chrM" - } - 'ce10' { - fasta = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/README.txt" - mito_name = "chrM" - macs_gsize = "9e7" - } - 'canFam3' { - fasta = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/README.txt" - 
mito_name = "chrM" - } - 'danRer10' { - fasta = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Annotation/Genes/genes.bed" - mito_name = "chrM" - macs_gsize = "1.37e9" - } - 'dm6' { - fasta = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Annotation/Genes/genes.bed" - mito_name = "chrM" - macs_gsize = "1.2e8" - } - 'equCab2' { - fasta = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/Genes/genes.gtf" - bed12 = 
"${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/README.txt" - mito_name = "chrM" - } - 'galGal4' { - fasta = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/README.txt" - mito_name = "chrM" - } - 'panTro4' { - fasta = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/README.txt" - mito_name = "chrM" - } - 'rn6' { - fasta = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/Bowtie2Index/" - star 
= "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Annotation/Genes/genes.bed" - mito_name = "chrM" - } - 'sacCer3' { - fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/BismarkIndex/" - readme = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Annotation/README.txt" - mito_name = "chrM" - macs_gsize = "1.2e7" - } - 'susScr3' { - fasta = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/README.txt" - mito_name = "chrM" - } - } } diff --git a/conf/modules.config b/conf/modules.config new file mode 100644 index 00000000..0b1bfdec --- /dev/null +++ b/conf/modules.config @@ -0,0 +1,32 @@ +/* 
+======================================================================================== + Config file for defining DSL2 per module options +======================================================================================== + Available keys to override module options: + args = Additional arguments appended to command in module. + args2 = Second set of arguments appended to command in module (multi-tool modules). + args3 = Third set of arguments appended to command in module (multi-tool modules). + publish_dir = Directory to publish results. + publish_by_meta = Groovy list of keys available in meta map to append as directories to "publish_dir" path + If publish_by_meta = true - Value of ${meta['id']} is appended as a directory to "publish_dir" path + If publish_by_meta = ['id', 'custompath'] - If "id" is in meta map and "custompath" isn't then "${meta['id']}/custompath/" + is appended as a directory to "publish_dir" path + If publish_by_meta = false / null - No directories are appended to "publish_dir" path + publish_files = Groovy map where key = "file_ext" and value = "directory" to publish results for that file extension + The value of "directory" is appended to the standard "publish_dir" path as defined above. + If publish_files = null (unspecified) - All files are published. + If publish_files = false - No files are published. + suffix = File name suffix for output files. +---------------------------------------------------------------------------------------- +*/ + +params { + modules { + 'fastqc' { + args = "--quiet" + } + 'multiqc' { + args = "" + } + } +} diff --git a/conf/test.config b/conf/test.config index fa8d6fac..1cb53a57 100644 --- a/conf/test.config +++ b/conf/test.config @@ -1,28 +1,29 @@ /* - * ------------------------------------------------- - * Nextflow config file for running tests - * ------------------------------------------------- - * Defines bundled input files and everything required - * to run a fast and simple test. 
Use as follows: - * nextflow run nf-core/mhcquant -profile test, - */ +======================================================================================== + Nextflow config file for running minimal tests +======================================================================================== + Defines input files and everything required to run a fast and simple pipeline test. + + Use as follows: + nextflow run nf-core/mhcquant -profile test, + +---------------------------------------------------------------------------------------- +*/ params { - config_profile_name = 'Test profile' - config_profile_description = 'Minimal test dataset to check pipeline function' - // Limit resources so that this can run on GitHub Actions - max_cpus = 2 - max_memory = 6.GB - max_time = 48.h + config_profile_name = 'Test profile' + config_profile_description = 'Minimal test dataset to check pipeline function' + + // Limit resources so that this can run on GitHub Actions + max_cpus = 2 + max_memory = 6.GB + max_time = 6.h + + // Input data + // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets + // TODO nf-core: Give any required params for the test so that command line flags are not needed + input = 'https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv' - // Input data - // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets - // TODO nf-core: Give any required params for the test so that command line flags are not needed - single_end = false - input_paths = [ - ['Testdata', ['https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R1.tiny.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R2.tiny.fastq.gz']], - ['SRR389222', ['https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub1.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub2.fastq.gz']] - ] - // Ignore 
`--input` as otherwise the parameter validation will throw an error - schema_ignore_params = 'genomes,input_paths,input' + // Genome references + genome = 'R64-1-1' } diff --git a/conf/test_full.config b/conf/test_full.config index 3cb5cbaf..24857680 100644 --- a/conf/test_full.config +++ b/conf/test_full.config @@ -1,24 +1,24 @@ /* - * ------------------------------------------------- - * Nextflow config file for running full-size tests - * ------------------------------------------------- - * Defines bundled input files and everything required - * to run a full size pipeline test. Use as follows: - * nextflow run nf-core/mhcquant -profile test_full, - */ +======================================================================================== + Nextflow config file for running full-size tests +======================================================================================== + Defines input files and everything required to run a full size pipeline test. + + Use as follows: + nextflow run nf-core/mhcquant -profile test_full, + +---------------------------------------------------------------------------------------- +*/ params { - config_profile_name = 'Full test profile' - config_profile_description = 'Full test dataset to check pipeline function' + config_profile_name = 'Full test profile' + config_profile_description = 'Full test dataset to check pipeline function' + + // Input data for full size test + // TODO nf-core: Specify the paths to your full test data ( on nf-core/test-datasets or directly in repositories, e.g. SRA) + // TODO nf-core: Give any required params for the test so that command line flags are not needed + input = 'https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_full_illumina_amplicon.csv' - // Input data for full size test - // TODO nf-core: Specify the paths to your full test data ( on nf-core/test-datasets or directly in repositories, e.g. 
SRA) - // TODO nf-core: Give any required params for the test so that command line flags are not needed - single_end = false - input_paths = [ - ['Testdata', ['https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R1.tiny.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/exoseq/testdata/Testdata_R2.tiny.fastq.gz']], - ['SRR389222', ['https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub1.fastq.gz', 'https://github.com/nf-core/test-datasets/raw/methylseq/testdata/SRR389222_sub2.fastq.gz']] - ] - // Ignore `--input` as otherwise the parameter validation will throw an error - schema_ignore_params = 'genomes,input_paths,input' + // Genome references + genome = 'R64-1-1' } diff --git a/docs/README.md b/docs/README.md index b3ebaa87..03f97690 100644 --- a/docs/README.md +++ b/docs/README.md @@ -3,8 +3,8 @@ The nf-core/mhcquant documentation is split into the following pages: * [Usage](usage.md) - * An overview of how the pipeline works, how to run it and a description of all of the different command-line flags. + * An overview of how the pipeline works, how to run it and a description of all of the different command-line flags. * [Output](output.md) - * An overview of the different results produced by the pipeline and how to interpret them. + * An overview of the different results produced by the pipeline and how to interpret them. 
You can find a lot more documentation about installing, configuring and running nf-core pipelines on the website: [https://nf-co.re](https://nf-co.re) diff --git a/docs/images/mqc_fastqc_adapter.png b/docs/images/mqc_fastqc_adapter.png new file mode 100755 index 0000000000000000000000000000000000000000..361d0e47acfb424dea1f326590d1eb2f6dfa26b5 GIT binary patch literal 23458 zcmeFZ2UJtryD!S#x<#o93es(Ww4k)maRbte0-+a?-g^xY-3myTE`8G_KvA54)F1tn})nJ5u%TA4Y;^!^{48eL_}p#q-Umo0M|F1 z74+PQh^X8N|9_jcWbq~ zzn+tZC9B75nKdz=gQ8wo9GJ$P{D~3knlI_`-PRhCw34f1oYDLr^;oEbgxa#A^J%*2 z>FfDE*(~JzKFs$t_oeLz))qDU?s}%Q?7b~3Y;lUi^Oy-2@3g?joA4Wkgb6-2=ih*jub)~7yZ`T=L=Z`B`{1jhkB-iSjea94&Eo9A zxN59pv1p_}RO1>EC^q}Z2)ZI;b7JV_x4lMr=Bker2+EK;8~!;JO7re*@ZkDmoV878S*N^yX(F@U1yqt?Is3nnV>7}#(5pk`V3C) zWhB8;CwWIwsVIjH+`<9=YA(j&3DgQdFOOGU~*`36wNC&QDv8> zr?h2PQgnHkp&t^S)q^K!68h~`$PjZW&-Wns;Zlw$M2sc z1xR!u{m|Kih*|Hht#M@eOMM#8O*={^6b9k5B5^eBsrnhVHD7XZ5BWO&F?q(>Y=QFl z`f>yQ9NCoxZCH-1F{#mz_j{QeyY~4h*VeyYZ#S@Z(Pnb7G=ud!RW)5svqM*&GI_za zzn;8LkOTT?``1Ygt6w!2;5arK*o5k15cdIJnMg)IQhF_zVK%!ma$z&jL zZt>Q{!PqKl^`Qw?nJUOEm@@qX(y(TwSJ~dqW&M@7-N4Wk_wC4izx(xJMrmNjsl$XR zCyK&INt}7@FzNAbbg-nW)sJ>3->I1+2~YdlPsaS}^X-H0GR_CEsw`PGjpq`uX}8VP zJ)HC34>D(z{KR9;E&z=@?@q_|I{NPOj~g>w!$gR?Tlu~F+L$Mk%}xQEm+{&T(5zkH zacVy0k3w!T9r*p2sgX@V;^+PfUYUrEde07XSV=KSDbkIZU!j!Rk3MQV=h-!y@kWVB zdYkmu^fiU~pp#ixe4hBEMx7^LdHa z_L*14aVIHtrsR)SO?=&kQS&JR#^AVvln=P=bUXEIy$QB&!s34znCV@y(C%j9V=}SU zoYLHn+-Lalm0$-=QQ}a(+2dR*{DPF+)J4y!ukiA_T%dF zVKEk;c?LWheG#A5{A20}CKjMw5G%2}cT5@Oce=wqdobHC70=kY7}dxt3diH9(Zcwr zCabx8yObHQ@#e_wjl%wp8s_!Wvxe5f-Duin@obgt>qOcqN$$@{X^C_rEDh3fmM;|X z$zu4;D`{YRbaJ?o!KkazII&|th9v5MG2Mao$ytOHtW+wo;XJJdtLuGjg;d020qT++ zpD}e&o?SeKSqR`}4`OdkWNC7K)Wltn zbwBrWGM;bBGm8uP_RiqfwvDD1f+uRX>b=nTH9Y%vpg{ka0e*E>%<+3!G3#s*-1D>q zHg~1@BT52a*L>mVcP>6y*0iX8@!3tDFJLE+sRlnU(cl``hF`0Q>e4i6P8|wKmqIqI zoY+a0V*Bib0`F9nG#sR(8$^!IWLR)cE8@7XZTN%L-ucJ{9yijy)w5Pom%XG7V<^PX z$Z$U82w0qgcGmld-O6*e)?pm$g@!6`Pps5SPKccjDf(|vX9zcLs7t!7cyyckZI#R* 
z#lj(HqfVeqyZ+Va{)>65sAb3IQ%a{9W^_F!5!;w=XD}ZUHFH$8=Xjw+VE)s$q(nt> zE2^aDYki5`e73RQ=DxaBNZ6CK?XKCv@V}=y(g?YHnFaHfXnl}Lo;36@?471W;&#Se z>pE*@M{Y?CevLG8il9#HXG#W3>;o$1``EYBY5i<;JlBqj2M8Y2!+6bPj1(S_bOksY z<34UQE;=Z>KiL``pYd}5fpOOT)GJQnXfNiAc5wgJ>F|$Eqw&D*Vmz+#mM0oFD^`-^ zB~SXe{T+5hd$gnKd7Afo9cy&Lii@syPDFDK)^V{iWEAEO@?xzx1bd`ta z;$(vG+=i3~9|D=GX%f~<>eOVjy~-yRAhLf2dR8V<@M_`C^ev(yOTg{uf=L3uyDb-w z&)l7KXS_HTo87BxI}fXF{ge&5p&IHk9M1}eNAwqw)`eZSOPFhqjS70{hyE@C{oSN$ zam*`-UH3RF-RWEP`^Su1q#n_J{AncekkV4m7YITf%QHBo60h@pk4N4O}hhf%rxuIZGiQpprVMal%h7?8+cY#L>pYnx6v!EnuIgInW` z)w!NuTp;fz9md^}*x@K9+`^2LO*bZp1^?BG#iS@(4i%AB6YP023T8Eb?M5K7ElSpe z9-wA22Mm}VwDkmECLd*}a=7bCf(}@SHs6UBe)Xvk(+hQ^^unj5JBeo$=><{4PBI%P z4_9XQ=XnE``;1Daa6f`~rGwNj9{YXY)eIw3G90Ip+QEWg0%?g=i$UHuQ?Qc0OR0!w zv?BvlQa!QMyI*IP!0>goBt$xo2^hlD&wRp?$=}}#?q~Yw z{**_|5&yL*Epz|4V#SJjg-lNaIx_{sCL3R=_VH&_;oOn5J2P=h!0enu-i%FAZ- zw`Hm*u6N*}&A7pAqr>-?%0(lveb{r8>hpDmex?Yo*8!-%1?YV0R~VEPBFp>)ba=mv+2(#>WEy0yxHZX=Cr2 zKmew%=^>HsD3BtRR*#H!@!TTGcI&fHrVh)P&|X;>)OHML+uWDn(dlsDjXa;5uBM$r zdt!r~ig?5iGbx!GpH+kdG8k0%;~)Q#0L6wFROJ}^Z%DvO3x#yNk13^&ccd&l)BP9h zD5cU-qZg-rV3Sg&?)`x}cI3`zw#zq{-eN4pNf(+?QuOG4oZ7zMGSVqOUe>`u=GfKM z{xPCciJFw9%Pk+uDSoormR&c=fS#hGOk=RGUtizBOoY^8P(>!Si|I9i=1ZCQbcc)5 zgE6UED;+b$4u&#dhZjdXwO3tpG0QaQwXrLOx5YP#TOaS@FP!h|G!z!Pbv?hTp0eQL zoUsiv4d@*Ck#ID9-ua|zPbQepcC4a>>9-bJApd()Wg%}hj#%A4pO-q{jIJ$f-SL7- zo&=keG_jhq$Ty4e|J^l6j6TQ=W)|~&Ei6gRn<{*^cFG*tS19#kHpMD7Y;wb~!3_%X zS_-3NQoGiWCX!M-Id;Nsg7oSi4VJ=Hi{bYNfjnmTq?IyK@@&_uacfb&8h@DIe70-Q zZ^KaT(4UX*vf7@A7CY;P!IVGIuXPRIe^&71Z1EyHO5&^=jUUKHF+h&m!4!dOA+!Ed zfA#uQ&p6vD7|O8(?5`bf8^gK)6p`>+$c*yG?Sw29;OD+tp}kDD9augDAEXWbSVoie zpHF1Wj8lWfIZ}mx%(2XREqF9!{fNd&iurAaoQDMCSNo!vRHE8wH%QLLZf9u;ADqnxOaAD#VE%Yg z?Gb?EmGbY}a0|vSZPlF3z6;Kf669Bf%h zlSGiY-}E4LFurm_CJN)(*l?=uX);o&R&qLuzENz?9I%S&YQ2>rVhx#c!hbvWLL!CI zA8mXM$zjnnJ#Me@-99}hjxCE!w8|9w{SBlj%Miq#dvS5GHP!DxO$sDx^4PF^#`;A! 
zb=bZ1pyj{R#9h$r7svB$QlJqeF1cp*ubT12UZ!deKFG%1N<@S2x&2UtqsVz zn=gF&$D4i3x7&vdoa#^cS?bQuP69OpspVPxm*%@DSWf!NG`o`y^R~o1Hvta;#!r%i zvEB~Jsi~sJ7Y35P!bf?OQin->fAk+TpU$Ow1st|l9|i2rrOneBP3&aDyoUj3K{a7! zOYpnJyYD#nr4GNJ;@$ce2dSN=eS7f-VptzM(|Ek^ze)mPVrpAEgrFs3mL>f(ZwriH zCZ65HdO0|W@2<+v9t?J=-4U9>bvM@@Ew4uVZy@c^Ovw9`k|$!+CTAn(u#4kC7TVTB zXuy#d+GC@RIMaPyp|Y2jS%RJkktCracCaLqfs^i^XFqK#3z+d}n02*VDF&My)vp)lNzWx<< zGB7hEAH?7_joYR?>+&+JIas*%Oiux%kr*X*B=8N8Ulowx0MkRK?pR)K1F_m8>dSe54 z)48k>#|F!OV#yOs7xQNQ@1iun5pl;py{tx+o044?r{W2O{f}3r{#QS#4bf(|f9R3y#6*0YY) z5Ey{M`dj)yHl)B{sdmvti^b0IE5xFx%jJM&5w69;`PGy0vGk2ztSW|5H3~zhXO?mn z+4mo>;Y7=4&gC}HifyMO`#70u3H6;0|| z!l=0lP|zVF`bfxm{%i98943^7y4Iz};Z9F$oY3iUI*FIsYa=o=nS^d`;3?*wDxi&| z=?oqs6uDcd1e_e5z7M5q(+I^PilSRE(T6%z<=U8%sq63V!wELY9Rj%#Y@2Y+TEJ8(f_Kh0ih?l6E6~wDl3~?-5%7>d{ zKs0XHUeORoi5+U#M{kE!Ae%|)^dabh1DsJI9N~LVXp*8$XlOfc6J+Cc?}SM zsc3N~L7hzcpXn2>b(_YN=J*C0N}$f_NINTiV!~L}nA{wn^XfBogd5hu!G?*THg^mF zFJm@9m{X~X3t5{7 z#lWIO++R8;BTByGl7U;fz|JBB^*4R|bLvm18x;DF*U`=kyxbH2nD*RIH5AWfJ4^5o z&Nr;*|NreNKo$fUI5}~n#Xcbjr0T-7MV;wZXA(QPt^`x;=ZK)5^`AFgQM?7ry_(Tm z0|EhWs&cYJW?|uvc3af(tfuyDf$28~R=HOa#}3Edru##Wwm0a$Vnk=_8+eQ; zfyq+GVt0Twr^QS*HtI+&&>_<%-Gq-!{iQr-3LYn-6bqW0VW)>%iat!2IP)Jd+LgnS zgI+jJ-I9HMJ8Z*$2FjwK1T0RpF%U`&x)S{3HqRJ z5^;r?VoA(k7*aP@tzB`O5Y26jv#x54xNH;E`KzzLxC)FEnQ<}IR#w*>9sq|zFzZq< zdM1%ynXvcLfZ{Xm=l(Op?=XGV8`BwRiQ%@@A-GnjD+y3K zN2Pm011b!s`3368%P&MapW-PDulXKfpeyRXNjN`lKKgC%CplwE#GrRw#0FE#Q4>R+ z23B4CmO%uy8Y@;F$hCHU6+oJ}_cKgm|4Amr{$`38ue-?+GX1T!hd$w@x=z{w30Z*W za@$MLl^=f#*oR+8(&a&`E@Bj{{1O;DPjj$g9U7~{m*?^Tj}Rrc^wc=(SycXVT?bW{ zUus*6{74fo{nOh@zQyv0g{)t}Qekl*>KXQYCI9m2jqge|&Ntj{V?gLs*_GkeODYhf zW39Q1L1~vk+#E^S!nCyO&z9Wh}2=K}`9#{=`j&)^}8=U|lz}DqgAteVsos){s zDhK`>&pK%cVuhO7tPu7@Y4|yXAdHs!(uKDuLL@i$Okc6Gs;2456Br??ZNZiONAe!~ zvY5w1(C)E9fRmpWgWU2Su0u6~9{@wIm<-lha;uuEN>&C^FJ#^|oopkg``l#i0&{OX z%rI6Q>l^9J++K19D;HrFU#V9o0M`MBTT#-(q&A{|n-`T~CgAFET=$E_&pIQTPE;J#&nrwf2N^I*d zH)ev~7d=Sy8<@syK<`PFvNtyfa#8^JceG^ua^o%!fl6R&j--jGkz8wS`EgfEZouOD 
zr97H059Dj(#$*$-!UQLvb92wS40!wJc!4K~lq-K2h2rXunCs?SjQERnvv9Fs?tF;y zWUTcQ&PtDMbsUY6_&np`UGMS0ZZIhnDh~p{`Bryj7XS~*R}%z6 zUO^hJn$_-CW(;$)hHu0ej1BNqv^o%*D2gR6zUvCZyw)ddNB6JE$;okhf7PEEz|dRN z$sP&o`MU(L_I8mDW33;)3!U*;HRm$zVV%%zaDn^*Qj~RdWdFNb;^fRhnF&{oeY-tv zq$p~pZw)Ls$EWKsEZubtx_9bpdCfsjdy*<8_Io8VtCIC+8kk@Qxdti>xnu}nRYJ-y zp8$3YP7u;u+YlPQ2`o_>S?mpXvd0-x!Z3=}>ceWDg*e)+#wQLE)Uwhneo z;*y`VfoY<#lwT^k4BP(ytfI;M`FoYsedi}L{1V|Ho}ciBs=`@vtgnieHdpWz%Vyy$ zlnn?k0KJWOnlJD9>6y64*X=G{lyl&%pV8Uo&>tXw%1za!6*YYVB$jR$Y0XhB#1mVx zvjd8N4X~{Dd&28RVEkCw9TLN9*Ng!?9F88l2Bl)w%7!97mtx5(Qx%1u6h+$OGa4#qGGGI{Pj4d)5yg8F4O2sfu61u0uM}?$_nH8=0St?`ogZ@1LAr@*uC4Z9(|dIQ z?OH<_%?PD56K*Kty@PQT;W#)tazY~|I7-aq)tQ($$#Q?{gEbJwJK3mnk)|l>XgmJQ z_POHzee+4NEWu0i0zUFmLTF(zvD3B%sp1_F7 z<|O7{-oZ2>t9k~zX0MDQ(4&(YZ#~baV{$ah?o_K1p$Ad`PAvgtuhW(xO{@bMjNb>Y z-k>lsDx?xX;x5*9RSpJe~BwLtb79%{p~+JTs5HZ&#({u>j3kAOLx*Y zW{7^+`OD%vhcxVW39F$jZ;I@H`3X?>Wwt@269f1o{V4-t-|dX4x7L3j zUHltoa@jqToWvn&=0CF%6%D0h50m^)qaXkRMC&Owv8iG~$}1PBgld3nBE#Rg(5)8n zga7!2@yjoBBoF_e3M$ongy7N1L_hT@!LUaCXX6QLZFKcq1r;;Z$sca}zfwaCji7PcbfW7H9p`7Eh$-j*7-=%{5f&}TidFWiMr=NYvc}Q@gh_z)<;^d&F zd@za3ugvK(BbprUX|)`Rk0&+6)#sm5S8a7;dzrqn*f)iXpvW$BVu6u)bR+ywtGne@B61Om=Q)yvb`45S}|LKt&5@)wSOfk;LhZ^UofjlQz0h zm)>a9f&40n$;-ndr=xntY3nOFGmA5POfiIsfgTzT*Cl zU{P;It;qo}n}IeEA1&?GRONCJp3=_!ce2$kKRZonNV+tS_uFPWzeS zhqSPws(Jp?TsgNT7yGtphSz=h2-}y#HTWNE#@LHFs^pseT#RfN*P8yLUm`jG1N5s* zfU25qv2akmjD=Q`s4SJxi@i`xIOCdT5B%W6wj1Fz8)Kuv*iB`}b^(em~z zz4~VcUB9M5@W}s3-SOWXu+*?)Al7p)Bw?jh8_#s)>lYp{{b%_vCY00=iC@I3$FcpY zYuOjg948l-C~}cDxL!%j&X1(H6ZC7U5?oVLQ<)zh*qg)k6HdNPB;PQcbVRXucl7>@ zE`Ga=^8RPrIRE!3E#e-v8MTy%%a1yk_k{s|V-=5ML7(Mg#S@LA3;rEyjF&X1w*^R&VJ>2%B@{=W9BD)oa@0!_Gl{G8Oe+Vki1QQWd~<<~Et zEV_YlJ=t8VXv>#L|FKXIJ)GZ1(d6xUoSPZVFOzMhM$6tgyhWq=@}=HzWm&b4o8R}L zQd7<0PV(LqaHYNNcXtTN4rc2ov$)VeRm&}XS-vamGB^G4tspa#HrPa5#22^pb?s&W zS%!p!fba6R+WLMjkeUo!qpKob}#cMpU4(`C+U6R8i>qlJ&Hbh52enW<`FmyjlhwlfIlxyu$Pg z3uS-Qau7K~%A$hBFocIe2<$LBIbEI!uddh9(JX=++R9aM|DO2#5*qKh#Zq^~O40f6 
z0#s@~v{DPy=4^A}ieKe(Idu22Ex4~>p=#u?w_Lx>bHE@Z4Dh%iKrDJj2IJ+qNDIxj&WPRXRSaNz$JyFkpFK#gLAB6G;4KKql{+5w z{2yWKln-fjDCc()q_W&mmIx?JvpXPb{)hR&ok40*!M7lC!&?b|=efwVb@r0;FeD2( z*x!h~5OA8DEVr>6PS6o_oYt+7HY+d${lh@ruB?hP=`vq;@uLNGIb%@~*X54+`NY0- z35nZLFQArwtL~;t?sb(T6k;wi@v0FFLV}%b1@;p|R%u%8ROV= zRWO3*fG33>>}We#nQ5Vk3gY2ODY5fL+-E@ zvWG%=(;1n3UEEjqSDn9V_C*FMSXjR{uYKa`>$>D#@FacqRX4qmy{)y4&Gf)@V_BVr zvNEa@r<%e5HW?jhEb!SY6v|~N%22Y0992I>~ud8In`Lf`QStH3E)x@G=`2&AraN&V){PF%a=v)Pu{I zuQ7a;TZAlAgDiVUO+`B+z-8%M0kCiylcazP7I(w|^h*D4Sn6R#-jd7ZMN@iJo=6v2GyL zo;~Df{e7CCta*U4B1pD0lfi=EwI3CTf2}#(`mwSD-u-%XLU(&V?BTG?P-Fx}R5*E5 zcvSdpxqh`s3e`yRJ6%Efp|NYd2}SjJ)h@$9391YRLSU!qq4E=W9yx#}_KqRcG)(~r z!+&i&OckDJQ2El}fI8mdeCHPcJ2=byp-dT&ZFDzLuqc{lvh)^vKB2 zL}g}~j~QUN0Fo{!0BTTKwrDjx#j6KVb>MsCz=!G& z0?uz!q)+3>Q|KAM0zy>+^zjMt4}XE)t2HIfc*Tmi?$;KdI7B#Aw9_O-Zg>98L}4}% zna0Es9syWr5+f5RGVqawtNUt}*r|Zy#6ay+mEGaSGMmMOW%88u6mXzDD_wlGT6!zy zpLOrO442P{0J&IYJjqwrVrEF87ZDTT<9iz5xv)C#pUTTj+d73+z7GI`Ehx*q&zxS(F>^b?4*udLeSbU~XBKKi_PI+| z`R!s3tpv7gX^R3~Cce0vX(P9@UCS)XwG6mNX_eM`6X(`UW>OMp*nTlrcUU?`gCzDr zKR0P?yj9z#ME0=e!>GupM|%&t{Qcx)sN)wVzW*5E>yxt5g6NEc!GR+F(!Nysd6n&^ zN?K|Q@t>y$%H^ z1}}eMB%-GY`CK5%Pj}AkUNRem1zBUE6y}0KA;6;dZu&VyB`KCwPfdQ5Xri>Osl*$@qxi zNUlL!r3OOxC4C`xXPqL4Ec)b`ajpfaw12E4xMZ6=Yyb-WN0LL2RUzLj zAKS$6X%>ekm|3yQ$#-`3N8ah|B+0f4bxDc4nfJcHZ{dlBeXYRL5bY2afSAF|vcc%G!HPxGS8==1)_U|T zNvWWGt}f~OGmCtqW8>q3f@5Go0Rce)p>g@dgop$3UUF3))$Wn6gRX7M3GQ}?tC)i6 z5#2fg?U#)GsvTF-;w zY-Nw9hPGMC9F9(W5F-PUEmiuS(F06nlcE{I)}b=%A7_~A6cEH$BClS~DB|X6Z*IT2 zIpOX|#S?qiLR2Osk#^=DtNG&ym+&FR*Kv8P<@ep!ZLZtJSjcEO2t@V!3dE-*!yhNO z<`xWq;JT2z{)iLD9MQ;&^p<*B%Gv z9;zH_>TGtlGO@9MT_xDkFS4=QaZA)){{?|_B)8Hw-q)H3IPzKPiHM2|2?0GNX^+EI zRf5>q`4yE?GgaPuK8|(quyuVfv-aF(wlXs_w}4}Na=7tnIA2P*pcwxEhcBp%Q-6rI3Rc0j@jnbz>h=|(@M6C7U>fx%lJG+#q2Q4af?@H7>c`6Fw&JpwfW1WFvJ!J#H z%4DH$Nww@r6h6K-1K$M;1QOi8g)GMGRywKGssy2=E7s%k;ESt|W)#O-pRtb)vf8-D zxR2gI3De!E>)xMZTl>m(C!Tx|_c}u7mC!FmY~hT4&*t)mO76L0VQ$Zm)=+l7>+9FH 
zfQZjFC%h{enbPhuNz~lx(beZsjm#JG@8B$iw_cTSX-?0fRc}lkFJafCcF=wqJsUd8 zMn~$&N!wK2xp3mXuom2=TlzBdg~W^u`*x0IxUuITUpwpCCpIqO47DsRfB}i?8mn+k zO?VOK*oa)bFN6F7oN04eyGiZR6q#;01`nk`g-ro<5USFo8#dEMz{N z)FLtwpl>inBl;{0syyqD<@D`l$#Jfl)EJHXIv_2TJFdCbB1tJq2^~2}iq9XvxA^o{ zn0YLREmF;vJ(gM2^u>gGlpZOM>hd=@e@%v3L4CC$gdajz11>;t>9B37u4gN+c2EaN z7N{PzCO`Ov_B8QVS#5&Tgk_TYRF@xdXvUjab#=&lP?prpL~g4|3*W;OC@JF8+0RZoP6YS5=9t%X5j<@=9s zJZx5j1kEdx-027b#7vEm4TRT9soiaOv=y$Y#MT=^nhP%|fDdU^7Ez#Ft2I{)2fQ7` zW7SkW?%wkBWnL)w_~|{}hkUWMk@uEt@uS1%?(3-dK@CnX)?b$25^pIgnsh^HS!eiB z?gK|C)llrf;ga;b^r9EOF`p3yYRe*y*MIBz1Bd-qR8TlBdJn2ur@`?phF`DfaY8;D zCwmvCvRQoWVlI$tetKk}o?MNTX9H3!Y@C`PXWV>S%$VZ{%|p4jHr#UH_Ryyow;{{;KtygLxrG7(#ca)wTYK z-Y0sN6h;=V$f!GPone8y(zPnL+1N>PyLSs(y=`1y*FQ1lR8e`3s=cW#m$+c=3)Tb3 zN7!8_R~a%Ek8tTvTN6~|O}BoxmiKrt8Mkh0)vSD{hV=%yVvnL*%!|m2!23pSnTfsT zwQ-^GnI8{pLlWXKtGU!5h-Pk2LFIGB{oj=);~!Nlji{=PmP~Mqtb8I%bKzXfV~y`v zhZpp~H7qb%5D%?Sa5$&Vmvl)54qk6v;W{B~UlL4_ z81zf;L5bb3SJPuc^~%Ua_>tB)$VLK>FZvy&b%*eB+g)qdbU(k_R*eJS(gX< zJxL0apH$ji6sKDr)n`3{aNlN^Qwkhtd8DRdnV96&?L&8b5Co{7; zvmmb;3CdwVs8W1GMY~|zn1^&RO1t0hBt(ULtGJTf^IAMxRpD7HU;6{ij?XXdjHv`a zw9!c(a5cYpR_vk~eKYL+k6gM+5023LHvMEY_p}y=4k&Q!!C<*zC^2Ia3C3Ji zL1sbM+*p_j602gKXP|mF$s?~%_vnUv zj52~Vd_MWnLq+!(*+*-Lw~%K)_w>^_onjFhcBsl-1z4eAVzf$ZoD9yB+;Sysedi;%NXg8B1{e-#F_eG|zvUc4YC2OlIpARjmdsP@u05 zr*U3jsq00uHQh{r5KWSeeT?KjD!)FjzCJInzFM??L^jL9NcW`?Lr-^4X;Bzlu&Q?y z02M)ULBT=3$s#1Y9wAzg8-+0n||g$cI`eH$?LAzF9rpS6h3c^3UB*o~o`&^2bx~YDhrzULrno%G+^r zq3*RFmK+#R^m@8?svWLq){v0z;Az zxet5`c$dkiO>9f|6fbU>MAIx-Kjc(r4SckyK$1&9Ug3)mVCA8Y1>GV0bcjayWKU?1 z;d6`Ui1G&YLMmdtb&4SB(ffffFqD_1Okq%F3-y=7Xr$+V_G^RS{QgC zXKOBBq9L5K2Qnz3y##l~^f-q^dVo0JTO6ysmtjFF?tQ4=Mh9FhB)1vUcK2(Quo8ja4+LSJ)Y<8ba zuA}O{%Nltg%FD9=r+$Zri;I)XEgq8j;?A9Ap0;b5j5DIM+@eRt2of>UaXBan>ZY7* zVXIJgT25e+vU`n3vm9;wD-XX>S5Izts;k7?q0ifUbXFZ ztu890yFSO?daUUr!gp4FD4cm`X`a_ImZ)oY+O^`2sgS=Z-sfHvxbI807yFk_pf??D z)@elHpxFmUW>0G7ey-bx)DpdGO}*NS(z-#}PYqNxLg1@YN}fvhUtBLqKc+GUT;OW% 
zO_B<`R#rcqET`udx*1pLFro0I)_p#G&G^C(J)_;ph87-;WP@^*-yrWnJiD`bUJP4q znYR1%sd_A6GDQ|qpc%2A)KEGs;Y;857S{2jmRaCehP?GUgH%@%HTz-B?uYLBrVgP} zH@h;%V${F6+&AJkBG1T_xqmSr-oU0c++uF-EFD zir8XIv!Ke#t=O)W|8PyRa?ZUc=)2$4uI5;dauysN?Iuy7nk&-rwtj_ zbqWwtQli>QcMkpbLD<<#ef^2AtKAu7XV^+t%ng>C+4%Wb9$F58#E^h`#n9f!Ps zj#E`k*Ev&FK`3R|?l*-YBQmL)w`1e~thLbiWK69X#vg3g_b_#aGcF(hyvqEk72SD; zu~^e}9oE2m94b1C2NhicobMMlg}U1!FA|mJle8de9Xe&=-H(MvA(68kA0+z|@_;-# z&(b*W+h^U$FizY_L_j1L?db`Rywq|kJ8nKA;QjfTaq4P?Nw-t8PTt*s02E}f>sbOX zogFNsq@})oI`S|>iHp=g?5*Ri>{ zfB@dk5v}dqihux<=+%{)tOw&-*p;K#;k0?3?5LDv#-^~Bshk-i29xz)oSMVH0{UfE_@k=$Td6mLADmA5HCS>H;8Elg7$zuRGQ_PzI@ zO7f{m&I)ngat~(Q!A^05yQ_P6@m+rB1*YFo4Y=~o+^59v4+%;&=jKhGbUydp4sH`1 zy;I`gK$wj(W`yp3Yj2)F9^2eqVW8uZJUv^BWHR7|G0X^Vuta6p*nh6WK_UPW?g|4H zCB73}#_XrDiYLG?L;{a;A`xflU$&e61X|e>FFS;FXT~~Nej^;8D;T+(JOGZ)-YCl! zDic2c`~DhIAgQ(OXEkNRICxKJ<<&$(86$}P>l1x?yCEt=imFk`Pe$TW&4$L37fnx4(%*=smL>0uH114m_}1+sdfuU!A0Zqzr@~p)h_Rae)3fnObHlP6C?me#TrO zCzi%;E6iC);zLiV*o22GEXIF{NL2tM-wS{K&aCtKGNF+iOQ+JaXYw|H4%FRB?7R&T z1KbAY2p!11zb8icU0Q6TPkZCL#ztpG;uZYw`xg!FyJfa%ZgI;OhQyI`fsLCle_S+t z4uqjjj%#Gy0#Ipt92R{W{euP*jXIOxh~qaUFM9L1FgE=XM~3_=Bba|6C*-;_c4HdFiehcxh0 z3i5W02=DV{(OsRR{NTp{O}%1D0O?=QOrHWG;?)^(Uyagt?*2oVuw0Pnoh8{=0EzL^H|PjFP(dF&|L7WETT0GcVgY_ zx1oq}^k1#{aimB=*)HzvnsDIHm*|-4-oMfmwO_ThrZR-9o)Q(i2K8OOn)fj<5|I>i zrMN-NYx$b70)BeTtJLb1l@(5>DzdL{44E$Db`c|6v{j8rk`njaT(d`!Q+zvdV+~uc zwOi(`abOznKOr4><!y3?&Pn`#_&3l#Gef?)=p3_f^Ui;vfzaAOR#H0C- zC_m1^677NRcZrEQlhb%^AG}2eIicl$V9+BoV;Y&B{w1=n5~3`>l3tCJ_iei91O5sJ zlfRNrKdWsWxAWWhrxQmbuci*ftO7n7Oc}WO%lj>uVaUiDKPF^(#js~|dl-WEB(b%;R&%wBZo4s*Feg>11~T!zk!KqRO#H>GQupBCvQnt=r+5tC~|_jcwZextGmQ=bxnE*pJAI!;`6FR9y=}o5@Ho683hnm=2#mq1!K9 z;~t#M?%xqQa&ju$A*O`A5Y;)3bM=^-yRtSfb`+m*&?NHD1^&k_^1V`zUUp zBQjO}+aSl}wx4UqTg2FEd)wQlHv^*CRVd!3FhGRo(ku4))jpO12ugP&rZjKiwWfRW zYw>!=HK|cBWxk2w*r^o8&xo`u5~q#7C$1%JvzI7GnjkBxN}y~)MsK5FzthqT)I+i9 zLQUJe#tLyOp$}IIr$A@HkBqga9H3%Ak12)kQ{#!2%+*+9#70XhbyV%2UkvY~D0|mM 
zOicCza3cpNf8-DDqMQ{MkW2mhk21pBOx#yO@k>+nz1ZeIc+LzQXaBES&Mc^@EREx+ zqiBmVE)B9tyJ8C(1%!qWVxu&JY>L`J5QAF>)IcL^2uZMMRMdci4TdEsixgYJCJ-=e z(Lp2&ix5o$VGm(RSON)Tn;Yzh>4%xBd6>6bx9&ano^!tXf8ROv|DAg`e-7-iRZ8cm z=ml-2W49d)ss}v#)i{V&<{UK+J~DWlkr^ixT(|EP4_lGEv+7l6mX7 z`rnoA>yKLGlLdp#ymRS3uTeX~bc`pDe>eR8u{uRKGM^xch?2hX5Bxxz6(kXw^chB# z#7h9KbJ}H`x6PI{mOk`b>sfNpaaH^>y|DfmqK}?)K;U6OD{UDN0WtzaUnVZ#(spqZ zVUr8UHtKKJjt*vN1d8xgpq!jad2C3(uDSb@6AQqAzw;SdN2f_9m=Y%6(PT^t2e zg=!ibR|V#v11NDo)>*m?5o>hTQnM~G5obZpgu!tGj(YQzF70x0uAV}pwc8nXX9bNO zbd)kXD!8@U4%A|o<87&s*`|`dnky@hr;;ZAo2~Bu2g7qn%3zfDbCVL7wu5 zo6Tn~<`BAK((ct9AG1D;F6BcA^^r>vEU%LrOxsOA%-~5M z#X&|sFPm7+R$g01eYw6pxAtP}a&bw{TPi%16;?Qf0?g2_F$#<3}XnXEmOcm0X z!{Mfdfq*I2fU-a1TZs929@5Rg{4M{z@?9Cko|M^ReIRLnw|jnGRaL}G1ibFOa|A7s z+co|6Dsuoxs)B@lW!!Fy@jnb5RF(!^gPXPin?1IG|04fYi3yRqp(DWls)4f1ZERc>4-}4==@QsXQg#VCX`Pjnxeb({{Mj4zJ&j-1gzqTJ&ZexJiN=qXShYkaMiouM$* zihdgSA>BBh>UG8sz{fP)%#B>6)ZZ=Zve3ylD#}%J_s_FUjp|p?zS5nme$D^s9D%?1 zd2a%1f&hF>jr5)w_Qg&=>>L|+n_ZGJ{}HuB-aWy6I|{a6W`Hnb;cfm6{HJ~AA5ZV+ zO^P4X_D8eT5KMzCi0L0n3XE^`Xqp2~J~>=whP^9u!!3KaNy^5JOLz)Qwu7R8tf2ks zjisRN+T82EvVNsTX1X}xJ+r&E1Ana8Qpn2QD&fVB#c4QXwtxn8H8-fA^k_PfU1K3X z>IqazcZf<=_}R)j8P@aQ7;I*x%o;+#m133p4|1XdRsx)DWgq8qRCq~o16CxrvV~U` z$2#Ub_snsmq87&UH8fBu1S$k8W-@S#nO1mvLoQ#oa#qzo1j5WsbiT7n#x9E6xctup zJJ%*Op$=MhR$JZqbv_dwGf|=jmqw4H=Qe2mw@dI%LXLx+E_G`7=_yvYv(qNF3xrZR3f^9WzweTrZ7WqEQ>&+*-xiy?FBw3-ZWJN4Th}bQmbtp<+ZqlYjQPJ zzNJfa4MuhJC8X&CS?MdFHTA9?=isQw$nkr*(2+Po!G*E?U$K}~)F4_CUzSe8@O3kZ^Er5IyP;Rw( z35J!UL`-m9!A;qPy7nr*dZ@-uSCrN8P)B_V9{n(?zi#F`+gKxs#*j zIH*Icy{ipTSyFy2@?sB~?5qc-cE2IAHt=n!gOV&jwpC}hxH_Kx% ztE2W0xmBmGr@cJg0cyO-?r1X(kr9xzu3+5V>1YzBtuK6Ra+RToix@7>2?<#qlBORE zbPI%~d_ybB0wTJa@)1vVt^ENOxF^N8TUJ5l82Ua|j9w5GM!ns$6;8y2MsryfV`-qN zEznw|%v2>{C)I{qY-dkz`?}Fkw&fQ zBN#PretyOeaJs1{;WawCpt=$SI;XBPp7InnGa1cDG>a+B>Gj%*6DIE9rWl)H8{q`X zVd*sdD=SM1z|Vy6zDVL-OqDUa_)7$Y%8SwTNc$fK$`(EpOnd?|qD%^KF$$pzZLs>; zv5g|58uwUn(Y{xXl&jn#G4$KyOX%KD$tr1&*MWVUnx;mKg3#9O_l|8-Q|n3o{>>eu 
z!`5^oYumbF>)9rC1!*L0!jnc)RWy#I)ou2c_^7-jK29i+|GW6{gJ3&?o*?PGQU4@` z$7-B=gU6FGBh1l6I?5Y{G*rvYh!1zuM?w70^DH5@`^PXicUM2_WGwV*Cy$rqr&KUs z;}joZDc2XLy+|3^isfRqI4kTS5mliCSf3Z_X+6tS(ggtRztKx~?*aru3zmUEkLmby!sE-ZloZO_Y`t>6Y$Ly1P@lk?ycSK)R&6OFD*7$sq=57)m6D?#^$`jN9!w z$Ftw}yzlq@^{wmjQf8PnYd!0E?%(f@$3O)+@w>P1Z=s-|+?A9NQ9?mM?L$Gi>i)-7 z;FZH#{oBA_R~(hZpP`gM2$z8$uA4oTeTsro7IypWIV$k;%@-1yjwmP?PVhfhrcFuQ zP*C1rN{T#HanoBrM|UIK_dfItqc6S?i^K#wb=ab?`wf!gEn-xkev5WY+aryTcai40c^)|>K>E+ec<8oTH!6Jvz?Pot=)BPAz*Z5>N7QUnkVti;^*btsSu9JUB@m~FS*n@cgXc6=9G3|4JYC@2aKBbRSEYonlO za7Xp=p9IuQxwVwM&PZnCJ#%x~OjH`hZAy4prD3VfDMm6~t%mQtl1`0vY z*HSSM%jBKyrWm|{+j6?LEI}Y3GvqKEDtH)kdJrmQRpWguolR0j=(SSeI_c4Jel05F zE(*$y81yR2r!Hccg3dmurS^Q(HErm&J9Lcb19agHm=hjsYU3Xc8JP81a5~KKILPL7JFyC z^*y&LQk#x%OoY^&&%X9NV8Xxp!e{Yo1&Fv(yp%lKzl_l9%%8x6n5Y`}aGHU!@%d=C z%jwtMQ?X)wPTTQXsI6($fxrBiWKUnp@$!V6r|EpIV72dz`))g5bBFxBNjs7q0h_?| z+eB8$4^{il7xeGQr?`&Hv+-V>O$Tf^Z*KOwdfAV%mO|c1H&BWl2sj+taB>rPpM2Ks zBTjfYnw03!%t6XgR&N&9DCQ*5^#-(%(Jz$S5s>P!v_TB(teM{aHrGek#kJFI=zD-| zcF#h8!oH(eZMS`5FU^Vlw!V6P zQzEMlGS7gS9xjcGDfav+vr-4~BAJaDGUC(`T{j2v{X^#xw?pNF?_27&6{QB-d@81T z-jvQ!gz*74P}1rns(}HmjXUJydQr5B-n6IgyBo%&<#RShWtQss{dV*2*RaN!muBb} zZBwb|QQl@PVS=EU>8^+Z)QZ_ATzx_hx8TNFo3PrwHnftOgs4nG#~VdD!^6)nyJlbO z60GZ^q1Vss__}XBJROZK>0Z}AUiyRIlw@c7XzjF`2{syyG6|e@>Q88&&ncr@ zyL*nFhnc(7S6a{Y@q4H*1@~P-uU$@Y??fFAT^^bIgMnpt^lYt6P)Fa+jKb4p zZ?a(y9I-9h^0XbT>Ehd`CI8bVkHh_97f{nGrvBL(!@$zC_yMt0=!XydN3CR@_mZc# zzSR&{_SqO)=z+GUr^3#2Z|8}7`RJTNUqcfKh?g2YU$bK6U3AHNE#Iz@u-ounY9?{0 z-hv)})tBIH+I?|E1_`mA!fP^WBqy3Y4a;XR(;wR(FXiVP^nw}5Q*d-Ej6L8FeIGK` z%;B=&-IU%>;#5Q2qwWxVl-YB)%VX;np!}q(Hrr5%~#e840K*K^J zXcHTx3)+WF6rWzaCOLOne!#;jc)rSiKz3TfJ8HH{jDli7`g34i??`x8>?ZHGakeMr ztT#S{d9E&*&kEl+Jr9sDc9uJ{rKTST%iDCs3SLZK9zkHq@v^LBWkl&IM4ozkJwiOb zFJ@BFr3c!#LQ)h73OTLoo<_E(o`IQKgW`QBL8B`n1TD=mdM|4BpF!RqRe0{f z!}sj9;oIzeC<8$;nc#j@&rR`xcC?El2&4SX+3Fm*)tPOw4vf0Cqe0)YKCS5&Gt~@r zw0Ch`M8b9}Ac`y5Jh^pQ;}Om0p;gUQhyK-E=%sI<`?H{G4fJCE8Bg0~Yw`eyyzlZ$ 
z0{*b26E)cV%nm-^VM5cm%T8daTZY4zIv?Z-=4^S0c1e}bT|tl0Q2xF!2)*JqxoqPu zzwg1BW^PPsEACOnTf)3YM2VZz=W7+7O@!6*ZcbkFflHf{n<}Jb=R0k%wKvp8K{95! z$pt;c_|DCr`-q29D}0Jo1$0`sIRo}!YjT$oixKNbi+kz)J?`?l;~g>YNifUW=0DG- zYBrDfcnL$m0;t6Onbp&hY^G8DV;IwC;Q3l8RRB%qZ4@Cjcp0VdUOW2yl8X4`m3NTNM5AZhNpzK~ z&uW>?=+MOHR+1U}-QJq1&EjV(W>ck82ABBmrymA;NF&-Rd0H%aM(Q(##X91M6JK1h zncX~}GIHf%?%Gl(hQdac_|HqCK*lo7_1hODTyeKpJCZ``dDdph+Zf*EjY@iNgKfUEl!h{(dmX0U zNbz!;kR{sBr3x_OwFRwzHcMjq+Qd^|;_NSb_QkcJeIirtLHIsFi9?W?mw5}-ntn@w zp8ke;z?rkP`_|2xrp?dKrxG{l6MPoj=vB_NSmHOjeCA(FV=LXNeov;i7%CAVc28G9 z@mmb6hyFD8B|rL1Rd%Mk%g!+s02W^9s-9O+^623Mj%Ds*tiBicI(O9ew4&MLXpmsU z^r71~MeXK;ldWsM2Wu6V=byFJqzATP#3zt}Dvptv`red+?eANkC&_Tz^}X6lIz4QT z=4|gqkA#pk4_}<`Z8htj)rv+ko*pr928n7rCSsBi*6(HW;cM+m29P2} z!v`B^9BA)Z01N_^hi#`)S9UH|+jgs0bD&Dk5vERZb3*!ZH>T|x0ZVYP*VcijfX(_@ zUGo`;5LO${U%N>I@>!{7n%wXrt*M;e83%!iq%TYl2Q6T%O|_HmG6MnCTs1}_o}a12 zmX_+frrnPAIVWAZxGn5czTuRDpLn{lWgd>$xrCl&94NcW4WeSC4<8m=z>K0w~a56+P1wDksK7nRmdn4Ee zq=bJC5eDh$Rl;@wG!s7z9W8A>EKEHl7uX-2KHbtCX+rmz6ZCCyq+AJ}JL=rJ9XaG> zc0_4LFR^}Nqu(@GPlJ{U<%~RiBSj!!U+O(`X~9)oy?SiFzO8#ni7%Pq)>~AwwRPmE ze_7!j-)1dPzAo*;;{0NBCUkzAQ$uN$Dg)j2qs!sZXqAq8_glj4a-dQO+U3WY9(o@K zpZe4dRjqQ`o(k4zxSoPv&Q{9ykqo5Z$7Yp)1U;p{WA(VZs*`H@nl$cjcABq(>)V z4s?5N_!w`pHsiSp$B%E%>iSm8TTbt6;YQAcua^$WT|6m2^lZuSvvmlU-t|Yju5Ca5Cb>mVJixq34`PMiwUGtt}AZ4}nLGr6Kod{&6Y zL23K+JOusXTZFb&$KkZ^W+s%0(kz*mg_oJfTo7q5DSX1X@*xE5(7!Q*j*vk2PPuCYwgK zvyhqQUV+>`k?(d+J}#z)d*3Qfo3=a9DO}4r_BxH4XV_0)Gl?0IWpq%Yub)OOVcJzs z@5FQn_}c7jruw>Kr>!mumWzMqYjm9{gbh+4*yAQFA z`s72sHv3!!_uuPgnCw$EZFA~3wt-&mR~@(I9$pBYf-i)lQkcnfn=dui!fKp`f=qMf zGFt>Mv~3KG=W#P_DMC)VM_j%4>g6vMd$p@|Mu$n8G62@#JE88MO+eyvu>Dd0q4p}r z*_wDCKkHd0uK2x1i}li`xrDIGkxl>2S{v!n?{=e@WS*C+Df7D1Zgah99)mCAHRME+#PX!(3lN1tyq=wT z4A#BN&r~(!hl?8D-(8q?pbPBoHJJs7`@|k~muzS?`<%BY3SNMFYl-# zSpNE*;$dCwjgys>^i6)kf_KLvz&kOo>VZ$g4^g2h;ERF7FZdOpHo%Xx4-x>mh95zJ z|G&Qk*S3oEGcz-Fb#*srb?`S+5oBUZl{ ztFc@4{$KCIbmON+V<1@XIkP&EV_d%Z0;RhHk5Kd@szVHg4sn+t6ke?YtZ=e*eNt@7uFX{LH`VP 
z^yuQ?DeNfC5hYr{6eFhO_!#y4>pYskSNdV*DC%HvK6rS&(8|h66ttI=%Cy&vI|72Om90UCr7>1mT5s8(#7L*CZeotBrN>eyyZ1y+y3kbcz4m? z-vfEW9v<~|b#Ecyu9c+N*w~Yk;0f+g-I}NLF)?J~p&BI4_yh!^1j|KeVf%`?#l^Cf zv(LTd?p?oHTwI)S7k&r8o%W^hPxSYbLb=HYu?J!Y7IGNu8gRMHF{b0PPqda(o9krR zfCnMf6Qi!TJs-u~PfeG_a3P`Xb)Ooz&ok_V>L=2FGr426Yed6D4eK>rI!RThXoL4Z zf2^+%$BEOJta5P6g<@7tw5Ju^!y9>3s}{sORA`w4DiS%(2m&pAJtZrv1$}_V7~jip zOlV{Z8)9#aa}htS_B@PZG!k5PB|W?gp&jRqcTImZWJBXR1eZCp-`6w51l2PLP|JP? zM$46ErF!W+LZau+=Gv}Q_oJR`^%63KCl{3lVv+O3mipCrU+{*qhztYzH!4Ls@KlV9 zp08Tsu#;Of1_r<4-;nw|U0ANUrWLkt`PuyYD>oUUo_8iJG~f_f*>(A;6&+44G*3=T zbFcz(rmCcU8N}ho36_>(W3DtVOQVP$Bs#|Z* zzeLHps63DlHS0g@i0LH|%|vN`Za4Nohl=1@0dJZp$=57}*hGUn2NtW5n!(AZ*Vktm zgb#drNEu4r#HCy(|6t@_DQD^g*UbT-8!9iDXT%o1zFtNZxGX%fxzTzQd37vPC2Qk_ zLtZd{996+m**lZV_Ps!9M#nrmp<4kB0ZJL(mKp;pt304=i3{bIYumgICnbo}q3k%= zLnN_OI8Z6hEj$$h`9sW&(#zf|)4A$uDQX)jgtU_L@|SfKiabuqpk*}sBu(z^6IGS& zVGu<$C;=?*AyPZ`c)55`TYzyxjnXG3D*#(2~YjfQBB=%Uc-N3od4ttKbpexVfi(dnjDP% zP)qx|aoO*D;_YcU(mOdDB9Dz$&}67?NX@m<*)uSEN{rrkFB&Lw@4G-`4dPsWuNcfI zBg&^zY{;aN#>#Us4ou&w3Nr6q^XFxvA=R`H4b%#FA1tlnsitVzCpKBH6?-hTqo#US zQmfRH!n0Ebx<;b*87&`E?4wSGru(E;y7_a1h~btRvq^RYgfcZD<`*=R~q$@dq?Wh%Bt%nbs1AI*a|w7 zm4RUOm;mts1-ZOP?fOaDIt19VbY`!y%b%Z7U9MYY0PibYEos;ZqDp-qD5jY%RU%k0 zf0A~;2pBOERR`qNsA0f|6F7vJ;leEZz{33b5<`tt32|_%Q`uU$a6!E)&g$#u&Sqis zjAgY}3tMtkROU4yPgRMY6rtJ|V;SYC56ie}1|EoFyY{CaiW}OyGFQ=o36(tAJ@tw6 ztvs04Ll0~YH<)zWeFiq4Z4e~I?>kj@U+>ZbVPZ^wLel_o!6A8pQE#O`*m*xGm2yt|-dK zogz9zqRwH56>=3Xpz*o*i)8CNc^iH>-a=8&G;LookL4Cin=-g;U{(gya0yHQBN*#V z-+9Djl$3?2p?)jnMYMI&ZTFvgu1Ol6gztlRnVYgu4ydv7d6NiN4Eq)WX+7u-$D5hG zzejcxt`LNOA>B-m&f|^isE63nL>{UhSZ^hY8QNd z%9wY=@rL0}Gm4O^7DVQ;35b6}ESjs#M4n=;_g0~g;S$;%PlI=3#T5TN(1vIx?RG|& ze?9D=$d!>9Kz$#HT;vNmrq7>$K4ItKfesHZloYtZd!?*Cneqz4G95ori}yN13AMYs zw@=c+oYS`n+4=%iskM8R1uwzArwQi34YnZPTKkws->Nji~nkb z-JKxW#*N=)Wo1kCrt}!YlB73}wlQU8L+;+ai|AZCw&yw$6A}pUS40VjfesufM~jO% zJXCarj#^q;E2~VlFdf&a8)YhLd6BDOKe4HUJCHUYvD(XAw|k|Uvh3E)k+~7JUI;{P zbwQ};*;OQkIPt1B?M0N7QYl{P~Z32{(ltt)fva$`&O@I;js25et 
z^u|d}?fNZ&B|_gU27y1YynqVGMFqIb!0}1ymy(7o9!I`}yT|?LvRaAB@yV_=Xo%l4 zc?lGXp&^M;o&Jqo$9=ST3k1{%9j8m#E;|&?kFc>5r;=f58-FfQ9GaYLD5&n?feBtL zqZQx9J?999Xtt42MeV`4%QxS zvSxn6oF~cKdM|UzA~2LWuf6@t$S}R7#DE7TE~@8b%&SIqlZvq_;??0-{jI3mA9y}I z=r&f0BuGqvrgGJCXGuOdyt*1G`gG9nz;-B{QxrMhhcmV+MZ?;@M`Fm{VbG+f?v6~q zn|1Z3w}^WEF8(a3T?nOX;hQhz#`u9l?S!oJvOxp}ol}Vpn3zN12FD^2R@LN#~aAA#Z%DCzEEK4h?B5E47AWNEtgHd_*&qz=gnKjQADb(QFEGm z=k_MMV*S*9_G1JV*GIwaek=EA`_b5Fq8BLfUVB69jYkY&0#7~Ny2Beu93_J3W-B$N zeR`OMwW!P{pnPjYKU$V>TTNAmijMm<|E2)R3pki=YaH0gq}I-}1f1N+deP}gO##jI zr;x2Gsn8DMs(8O+7&a3z=t_b2I)M>89E!MRKTF4dtw7I%e^Y_L8MHScesK~fXOvdL z`=2Ozb0TD9L-K^B?@HSb5*`W#=Sp!`IlRVIIznnIDh(#t4B%IkuaXtBaMNNuZPnMb z>gxG@b3a8e0FAuo#Ut0rE=Zo?x_hqjEly%-I#sJMF)*P+#$m_aMjrpI_IxdZd-zaW zGc`q9xfmU*O%H4Pguzr9TjZp60LB_Y5@O>;=?#C+5|j%@{;B>rwE^`fWpT_*B#5rR za!?D|4jL=|Re#)ZjA4XA0c+?@7 zrL9%1YoxjaPml%ZLv8RuCq9{T0U2^&Cu3QoB*ty~svl6uS&zTQ^{lWSmUmzUI0I`G zH4RXH$_lev+b9b73#qHj$ZT~Py1gje3k&?oi$@zH`Hd-UTq2oFK&+{qbykpzK|3{Q zB@Ob#(f>ppxZ7+8%_td4ch)l=2>hNm9J8jV&3Mf@_XB6hV@W+xIl8U?E~wpsh}$8n zv9YnNOtCV;7EmmztE&-O1T#B3_8-@^w6zfs-W)|GpTh51otY_I=_rvyH~gVG`u0F< z5TcwEJhbSh5Q2VxE%X^!-=$wG7rrN50kSc`k*4*V2KYBG*~?`NETlx4Ygux6eYqg` zZ1q&@Lt=9A?dxj8(VB*NzL$mj&g>cX{XG!KjjJyc5`ulwSSp|J@`?jgA~CVBShvbj zwHQeqI61YowaxZJ5kEa|d_Fwf&pobc2|I(9Is;!59O8&^{H>A~UK5h8)H~E#bO(%7 z71>&06own{+sY2Et*uq+-D{;K2P(=U3|8D{W;Ie&CeR$DD&e}f)DI{*i;Jd6fydDB z%gKw8zgWun$ukL#+w$k;=Hx&pCRSJS z7UIDkZ9wVOYpidSA>oeuv^__akbqBsk1v9##B&{Cob2qJY(v2ud_Vyj931TJWdLfV z8mzLia%fcD09lwTb%t!V#iwvcqA9n5(vvA=yYON#_RlsZ534sy@DzM`j+{*Rz-0R1 zh@or!v&7~_A{)eyk$}!zc1e*j9Dh(HxYmnS2 zQ?TOqoZ+2SHlA=}foXlWR3%eEZScKDL5yHfaK5hOVmP#L{B%b`chJ+qwbBmc>buNx z5aoj#$vGD3UQxcaCugdTD8y0-6G)(9oV+V>Vq(T`rTEv1l(+=1Nbhl&{ZmF_ z%pZ4@l_tyRMfXl^JQIk1AraetCnEB?X9k#F@@By6NbZfeRO*SSr;(G6pvUn6js2L2 z^_XXkn#*wVj$e^_4L8NQJTu76fiJj8u*7?Eza&)LEAw_IN0vR2%Af*hI`-BQ|-sIu32GbNaWR!8W# z(^e18lCO$alRw7TJbpcCPsf`XR0T_xqnUK0FIFk$$ER@Y44ftz1ZBF6J;!ZUZFwp@ z(J1m+D_5$d%9X#Gt9MzRlGFW3fC!h!5R#C@(EP6}mRH|`b?R-&TlvSRtcdGQ%fJ$- 
z77Y{wt#4CZm_4n=d~o`o6fe-5t_%@MG$sGvHWgjoZV{Y1uvitC!9`TPX-tCpIJbYN{& zxKz6lvqs8lQ4!_EZDx-XA6ap^ml(rgL;Jc(kdfQOFf#U54)Wom=4)zbeDnzk4RvvL zt}CQXQC{QlHdUIAu^XhvpC!YsqTDz;d*x%k6LNSJt=G{In^tspzRzdJ*H;%VP!+W2 z3SeJ+!Oh4h(-99Pw6L?Yv$n>v$x2K~DJd?tv9iLnag&jiMZNlRWJC>t-JA2^D6_tl z^`)iz>x7ZZQtUYl3$H4(U%_jW---y-;b!>%f=Yd@j~%v=HN?g!>L|8INKQ_EDfE-U zTy#c|0Tm^`un@B_d}FCUlYxPux3?EboLXB&00%-D(@sMZC_hD`^MHm2@FpZ)DN>B0 zy*2O#ILvPW)}*Z`DP{MP+uZ{KUF%tE0P!Qnmil%U1D)yfryl#om;!>Ojprp}Sco^G z(E-hDa0FxNVqY$m#H3NzJGU&Q8A*;7-Z)~!Fdim}3@WwEVjj%=p?7=W%jBB1?xT+d z{%o|EfKjuaB;@TKqC%!dI<+=wU2O8B{yuk>OCIKQlH)+QFad+y&V_2*wkfE|b9Nh( zIsi!=7R}H_Z5O+^I7$Sv22GIho?vb+DH zJP6)BFnqZ)?mN;%hrh7QnpziCncZrC1I~ef=N9u9yERF!25LrxL^Gonyj(03v50h! zf6BQRZ>TD_7`|e=Dz)BfdMD`i@YBr|oxKkrXYyE=ImB6nu=Cc+7##W_O-*@^wcHgl zyh8zrqkyU-qNd>OTIX~KexxXJWvF19VwhyV5iVyloo5Y2`YfM!Xti09UN5ic1$l+Z3$%;>iTx!rb0 zULiG>g|rJ?byj@y33+{3zf&#nGG-MrT*_i!F-RHBhZoo~KrJ$1Fx)-ir~nwgo`;!Q z5#l#@-E`3!h0yS9#HP$_e=X8n7AOD zg^kMw-{3pMo77am+Wy6SH4i&4Ec+>N*E3`X)7JSQh2N(!li3Q8L7+hgnp615{MiP1 zHL#zx)Qz*UvlrqQ^*o>>=-xLOOMNQW@6ri!2U(>p{lEdJYE2fz89qVi=EyTW+zU zR>$w{Baxi7K>9eBVOu2xOPZchP5(Y%8FtSqTu}~p_zH-&_uevjA=h7;PW12BY}Z1$ z3l1wF?C*aG=tNwKU-@U53^uu#$-KwQWqZm**gXO*5mDp!s}S!hm`G^jC}${&26Y&A z_W>GtDdpRtXAuAEh<9nPTS#+Au|aKc?KJhK;k?*@>r38`E5!g7H=s_gf1!Je#&~j3 zOCF!FqT*+-^NAWr$pMFg?LXM~1wm%;ewq~j9)%^Y70p-%n;4^|>?G0#pRMzcn~ujW zgn#Z)O`Pjx?%}kjJez`mz-~P6W*y8iqwE>rd|!PjWMx%oPB!(A-t-S85)L|kufnUN zX#lTU-5mP2`&=??rI#I6tCMcAHTtXptNIP9#dBMiYR3B-s=|gJ0wLS8E^=v2O=1NP z3d3z(Y^z7g3)Cv%Yvm(PE@Xv(hl&6h7+6lKS1oko?0W^--mdWW6H)WHtH zqena(0y+4QqT_Fuhe=z5r={)Lm_;gy(N1O6c-`*q#sT~Rprp}TXfE>^1em^ z@ZuQlS6JF)dAM=;7+>@Ycc9k`C=mi=fXog2_$^WE;;~`&_aKY#(XAu|Xwm?$@w?cH zm$F1GZ3Rg^q{CAqG0?zXJQ-a)X?EYk{`1B2-dbgwZ|ro1btIzv72A5W9xd!w8ZM zfhDYjv{3U57gDQR|Ea2K<~(``s9Q9%^9nyc?F9UmQ?L?UiFu7iBVR^?jZDx%KL67) z7BHU5@JoZrG$|wlNb7nMMg2>m#c34GARf!YKrU1i{VaxHn*O}UZAR0W=nr38(wB(1 z9z1#d2jUWs$ZWu3@Fx5_!(%&UKzzGH^&0WmP&BUoS%X{e>AXL>LZ&&;mVVFSN6!+j 
z+xz9qt9>gcr^>>@Ze7*wB*PjD`@r&suA0Xok`clMS`CBPy?sne0hH){>kQiOs&4f*+X>FIii<^3Tg z#n#p~9Z?~(v$LC0AmEHIJh1vzj(6FQXOlz(xYptM9uhOZlAr6?`IlCEr28dcIP-LL zoSmITkcp2JX)3FC4AO#tvaFS=pO~14^dtfUZ?3jzDl13*(1|Fu_5WB-Dk_5fNgm*C z`OhSc{f(t^W=9XmC2W3~+p1!B*M$&itpNT@caWw=xSsdwo4!6PyXIAEczzW)gt$p< zG?{G}UT)}b?j0+ROprydSpH=&Pbk$-)-&W@l`SRVWl~f9h%f1Ywq1+;vUp+sl}Ug3 zer@=L6*88L-G$C)SZ5PNA?(>uDW4Sy55SRPauXINCgw z3`mG1^w{^1$_CZqYQ!y-QC!7s^u07KtHO_Ei$S)$ewJTkGKzjtNVH8{`|HW!_|kkP zGM;kBZ61iOfcYBcKOr?s1!ka+X6?9Rk(~5Sqv2M!+~4;Gu{09!42cvM_mIiWdJcom z^cPng;}I7u6i;_qnXMhIWiJY9TUmIpU}L0IDZhR*C`J-)7GBRhR(n-;yWs<=YA9eS6R?za z39lg~N7|b|+lL44!Q4Zf23!wi^!6@35dUJ5KDGfvxPvQn-9+Qa$$UOZ#5&pMy%sR@ z8vz_o@Q_MbaT~7`ag78RA%Z6-KI*9J zdk=3+U5c^=8UKe`GftW@f}3YNvZ-rD7S&s_+VIdQ{P@+*{Efr;^Q9kE($d;@CPI1F z5IYiQE$A!2z6&iS@8G68detTm4m4N}qdG%oYo_(s1s>zaEd2276sQm@1fUc3>FG@+ zp%5_8aoDd6<@@{J04O?7hxl7(h_0&*ru08l*k70f*yrzxrEusY4Frs56ICC;4QHC^LBg3uSO9cY?v)Fk{Rve4!L zIh|cfrhD932NcF)3`VmyM#wcjS$_T%A)Qm*fi4piK zNG%{dRY^vB&qq}ox7X-PXfGaT_BTq3h=O@zLPlyHW;iPKEFtw9g}ec2Z85`x%CuH% zAf+M{GB!YYy{_!t_@<6wH;-;7o`+UkeG539QTjzk_nVy*Zsbx4S8xD?=TQpfRe~PE zzzl0wx`MrYQdS(rfCk4`-^4gk1*g47muU8QIs zbl)W83cI?bw!0NMAzS5@zP71;k+-;YFc(o4^rd`yu`to0Yl%Z%892f4{75|UZgeM- z5q9d+jMxBjilqc(mGD_)mbHpQTt!vk`pVRCte>R9+7=~oH*5(x10G5-+mv-`51ZFy zbqtu@sdJKLO%89%wpLSO4I5ag0Q}R0e34y(;YhJS9&su=B#NQ}&R$!FwfZ`c7~J>+ z*C=l^KhH35S!yU{J<6cwRfbaDeegE1vQB(?TXq_e%VT&k5}EpsyeT}Odqv(#e}WNSLsXX|#4qM^5(OCX zv0;GRx4ym}5)zUT;sp3DRaI3sHZ~b|!+=b)(4((VC@maT&XW1uch<%$h=_r=(pqJ+(64TIjLi_UZ7fNiR_W; z>c*i^oPpsDQ99}sQO8zVF_p3r;=PjUJVH&c3 ztXlM}{=d>lkVy9ckz)RtX2_IcL_DD1Bsczw{lOr8pb13v^D7sEmPg8^B zu+-4tv2m-LI*y{CzP@3S%2lo5;T=xI+Dl7%fwUo){=}==4{E7Lha~3I@Lc`PV7F6lk0Dch*+& zLTjd`-XfCK71T6fA~P5v@ zwe}q)3=_{C|8D*ox=44fnHIz_`t7I(Sp-j)TCQfe%Z!yhoXf$Q%pzBcNqXOcDoVBZ zfwVX(j`Lb)cauBf8`Bb^^`I;m6}hMsrq|pbUbAeC-^kXGO!RcfD>FW6O^Vr6Pt_TL8bS*QSUbok1spKPn97(M zu`f@B3AS`5iDa>)>{qi0zbb3KCl1a-u z`W2{TSOklXmq1zlJ*FNo0<}+Bu?=G|CXauD>a#7X=oMW%Zydm|;bIMpEH~lg<}$N~ 
zIJ(K+@b=Y-l<94J8hRU#0@*Nj$^H`^eGf!YB@#WOiD%|*6!CvCV*YN4{NI2+9Ygpk zN;3?vR$(2$Awhbdm7+>PzrT=s?3)zTiIzJB*IeiB ze1%82N*XPlz0-g!_pAL{cG-%Gia`(VpRwo~fz)EnikyxsA zfiE#JTHH&z>;n%vj+nw=>s)sb6B8cTz^?fCsPSavW@_r_w9n}Hd*nVRKZj>XX=$o? zdU-dqs79Rn7f@8F$#$x9)|Nv}&=YjgE21}yIuB(p{Exzf_k;k z@|I*~`Sei{ovr|#!+zqSYAj%HWj*tCCQW4eSsW5ep2sepN89 zc8}AB`%lfQ>t%j^X0sQ<67;*}&_UEJ4pquW@K$8wp&|Jbn*XwjvQ=u@fIxMX0T3=Q zwgAG>8k3rv$Y^%RdudRn_r#PgB7eXW92q%j?*f^<(;uE?pfNQb#plPIS8(n7muwf~ zendM75555+qcUQ{i%>S8aiV5Ao~g=A;qWiY>Jd6ftV?&k*J}Tg-z_rq7?7zdg^Pk+ zs4(vfN~u_vXv};##Y{{TPQbEf`p5`25(ffo3M)7n1#I31$r=c3RmmQZ(SDyk{o$d~ zE zP~2h+p&5sT(E2>ry&!a>$>>*!(IN$rQTDZIeyxP8SZysRVW(Iab} zWu98km0)kVV2Txmyb1|rpl!vdTJ6TaW?3RtxicccWo~{gB^Z<$cqWVpfnW2W4emEW z(B;&;w(r1>5|^BgND2qcJs(%`AK?5+{+~Nfr3Gu&@nM(!4KL|W@AScWH;PI)@5WK1#JpZVwXm|XGO!w}s#Fnb+wUDa8fC;f$y3QckY`UL7=2`i?%yvE*DGCSWCqz=|Hr_5R5yxxG)E9x0Ig zF$Bn#KVz|_g@8-;r+=3Y_;*1F--_39QAW0x7J&!rC7|lSY!(qx4WyW@^3$aId#e3^ z&!qdEevXj!H->BEj?Nkm4nP0|LzI8P*~sZpjIC3PoD$^vSO}o4%kD0Y1i9Eu#5=MZ zV)IevQmWUK0=Wh3^;4=N?9$uGQ8B~ZK-ge^-$@SGRnr_FA5~RV$f&1zxLPvtD7Nc9 zGF!k!r3epuwK(2oYGkETOXtzS;mY>re+*v>Lg3oD(3xN)1S9AOkl99p%J25PDANqv zF#oTZdhLsRBF$gh-vS)?|A2*}kdQZ_^cg^QY-L~zqk9xC5FtCoV9AUvd$GdupbAjr zDA(_=W=sLQ>Nx)->DIRQER58zWRQLa2o(rW9rPj>`f%3& z3~7zmB?z9(D{!SU^B^8Z8cVbeG^4{AJalq{RXl@w0yA6T83JsCqqnmQBdBeUAaoCUQCy4(yz%qwVj~CIj|`+;wBz z2&LRXuaWDz!XMKH>_r6j3MR-88QK@jYw->mfidcCdNhMF&oXcvC7f9aGJcqrGXH%5 z?mg6j9Ndh_;wwBu5{oV+fLMr57l?r<_+tf(I>rt0i2KQtV!wU+_DE@ee}72{qw8=Ge2VrekHh((m8dC;yac0QM;ZTR;%GrGWi}$&nE;n6Zho9I#i~$S4!x zsvvi=Sn<~Z0>Xd2Veda>?q*see=&DJx`Wr9pB@=X?VIVdRi=k?Mu;tYlmaLHVSEQ; zHKJs8$XykPsqkCU{!3@5NTCkjDuIOvrj~VmFNta49ZpFDwd1X*vJdLUDorE`Tb7#E z(h)gGsMd7BMSVAQ?Pzm-l?UC+EH05gMv)+g!?lv0-o}O4$$;)_zz#tJ6NJneO;#|k zcV|I|Vw5k9DheyOY33$9Mh_`_20)v=C3&+19$1cH^-^67btEHpCk9sJ-lXw_$W%O3XhRC$M_ZTzqZTW1rMQrh;#tCrYJsL`$&n$ zV4xJnZ7Q*9ES8HLx@R$8Wikv7DY?15J5Q3iSH+tqInTZtJxF(@Hj)Vf_SH$wzPQkY zM_dg*Fh*Yy2&9J(r@+O%%eHY z{fdsKWLh=Vfau|*|J=&_@HZh0A!rggMZJi1)D#fHxR<{&l99~e@sAxG$|s7wMSWi| 
z9tkE~EN9v75A&HX>u6%YcL(y_KQ@JhI03PIKF~5#=u9;Mdjb&2 zi+Mx%rZ4$^ZUMO@uKuwxgo8W0o;-TlSj@aXgMlE)8II+=K4)&q%8tUqjR+KA=I5W9 zoP34=2Vjq{H-B;zJPl~NXbfnLh%9|aPtW^(?vMCCT;2vigC~KJ7yJ+G-D9s~ zHhJvs>WP?|3OInj0&IYB>cw6c5LEa5nqr}8Wb>!asOlgcr%h2)cJ3`M$J}5NfeJ!4 z!v7|;#uMad=D5uRtAbso<_Ni)t^R&<7%=$2rJF&L^7A#@#+%ALHXB)iF0SDJly{zC zO{H7kcg9g%ac%cTYalgN&8m;+>7;sRAQzKcsL! z9pdSp-)^vD46y^}ZSo8jw7~|G+H&sxaLztL2KDbbZ0?mi)ClgWC9UwIH- z17CgkS`JW8#g)EVwxU^5+l4f*{DI-wYZ4s7KrOL2cH>;^Xnc(=#Kr}~2eBT{{rL|d z+T{I0lC7_u7L1*@nrq^;#*J{QMywSe;GdeohQ!z2&9Usb4zV2je%+=8FuN-Wo4osyaw zOG%I|3KuP~O(nBoAZKvJ6A99jOgB+t0cj4+Lo|*^>p>a>K0)hdeQ;2Wa;}St#?YC# zjqH^IvcbLR39D`;M=8&11eM|>vtMMy>F8U)yuzWf&YxuZ`#?v2-hm>X!;}?Q@tB8` z!fOmsT#}Re+TGXCMhEnH$C*(=;_j?TzK#I@Ha!F&iI-)cfvO?E8!?-H!PX~Qs5H>v`6bfxFdo14N~kp_>vNA47z9PSn7%X5y^mcq};(@5$Yu`t-EWoV}Nke?`&98vC<*d=66R>Ot`8# z&|CP-8zazRrzcgs{y+q9pK1zgX=wp%_ij|<3-f&wm;7*oWDp6(W09gQ^?%W3)zQ`@ zzb#zM(6}c2hLvGwM~6Y$Vc`5p7&xHw=!*Y~s(2_abuNrPxCD|&3ZLl?0n1h_W93W6 zFEtnb*4Fnm5r3wf;R3RsCNFa5`GaNrx3MNj=_*sq%2s7biEbNm29*0`N+J z?>wQ`W|IhmA&~T7V>k%FP@5# zIm6X<<~=8J)gLm7G<$|s_klLm>pVM&mt!%X>V{ z8OkVf2)fqC1ux?`7>>0(P8yDl9eONSW-J802x>U_D7SKUVN8OdWk4J=8-pFp!QLzd zQ%7n6R@!8d(e^m}AW)q8#|XNO65@Hx-2Y3)5!FR3g(cfI~Sf_55# z2s+Q)#^7fO;5k~N$-(_(>659=$+0#FiLsZUhdqwx`I<~ zHJ^Q!4_~#&g-4JXVg8$PBEVpu$lIAT^{I`@OmXtS5TUWE%kBwo!4fhe^S4{{(awhkNpg=`Jfxt7In5W3@)d7Pu!C9DL?p53ulWm`KA<$hwy zq|f8_?1?44Zy54Vm(HE2uSTB_I+peknNFArf~kp+JZ9*00w|{PTT3>oo<;tUdKP;E zy3bp;%Lhlg%MoWZ%*s8ohb!q*bw_O%fZ<+mo_x_QS2Ig97-(r{b~x1dX;w(Ahb3P@ zhB;Alm@+MXF1aLp@Qm?jd?)fPdg$v)W)C_WnY`pBO^y}|gCZsZQvLGB&i0}7jVtQ4 zJF#^&B;?E?-DxY9y?KP`1a+kHKbQ(h?p5%cI-ETT&0w^qwUaaj4qjZ2f1|$t&3}D0 z=~Qp!^=;k*bN=5r0H|vh{?%{)sc*Hc?H`6{zFYe$%gej})i-mCY?U-p=O-g_;x;c1 z`5Tfk0{;XE5c;eAZ%apj{E;*OJV&qN{r!zUqns`1R*`?yMtRU__9FUccfm@=5%t>o z?GxnE^u3F+rkLTd{Cg(8CbL<;l{g`}i)|vBn-57K zgG0xIe}6tAb`OVR+#5H$A-{lbmRKc1&N^fc4GkH!=M5*buiqLGE^I;Tj{?kcbTdyxjot~Y4)i{T@hjy<+1ZtZ6PrYMk#S__K>z!*sk7$GKuvkx z?Djz=T;wW-XPZA})EM)jR{O|pP}9628^AQ~KT|3*P(rZ--w8P$(%*a3&ZNbbSHVA= 
zSSGuu62hoS|SV#5o~d8Ie%3Kn`pAEv$wGmycK$6 ze2tBqH2Gep-~V1)3x<$uYp13^YwHA1TXQJD*?-6^4+O%+rmG?xOed7*-k1l0A%y=; zo+&mm`J)$+vXlK+AJ>@J-q3;xcxli~dtfOboSmlY92GpecZHh?CF9sl(lAfhRNWWM zS%{$~_s|hk3?4am*~o(9T@QU=P`KarDm_!i*_LDL%FD<{HfKPzgzMUSJ74=1`@zxV z$zvx=tug__=U0JRc+R9+5pkQ|S1`rD&hp@UF6ZZePd%IOY?4w>Go}>l*@NnwtOf?l zNfmKVC=2@BGUqJ4=s;c|>1}a3!>md^EtYnIogbdvoH@It#ZV)P(E0qw*=GJP)G$AF zNo#UDhNK1p>`?3tho8JH$#>;i7FThZyp{;Wn8=TSgW-^4?RQ#+;u0n4ORbwuGN?V& zW*`w|wo(VHzF8mtAtkMN&W-w^n(tU5k-g#!ov#Xj2@Cn>({ds{Y)Z@PWUO1W*0RWrMHS< znBh&n?wo%r=RcECC0y5m1D&HcJ|^j#>#_g;G++H4`2p&|1&=PJPlJSdw(L1z3E~^1 zeF2=%`h77B`~ZyTCXt=x*T*ByS<{=XHUM5n7UgQL)Z)5`>Yjm-b_L13+3FNOZ{DL` zN~Q*m$Ayp(+}AlOWUh8LBO~K{aslYufSv+iH+}-SC^;|1)(1xG0n+WW|Ji(Gz9$%e zKS#nT0^CdknSN%p)XG8T=afjZ8w<3PWlG=~KQOWyC_OpwKK>PIY5DNrYbq-WF88}D z=%5>{>1wlm&Gt2LAjGU0B^}<~|2DW|_Mct+|NU>}{s0=fkxOzeVt898QykPk8WzyC zN)(a`?^2$3WL45|84$tLP3Fx&)eG4o=bgqD%<~KP!{u4iFP#)~J`LgE7=y)&f*=9#d);a7Q8)-D$BoJ^VS zw)A8ajO299nwOo#LNTv>@nxfy+|-&&Y|Juq+c=H=RaWNdxL^ExT-==3J-$u%NR<0|q1J2|-=;+~ zZvV89e1rUh!wxsG3>03jkj!n}M;a9p+h!V#*OkUI-{2e1C3qKF))`H`pwXSmRZI8m zN!63M$~>)KK?NJ27VWY*W zQ)DezvXGXox+lf_XG3Y=;j-Q;AX9Fpc3lBjt^GyOe9CK!=1*F6+I%S)mnNLzBgdiW z5wRFv3J(0jCurDdnG4<#Se5veK#DPYDG#lEbGMmv-sbX81BaIQ6tv<-UF~T@P{n4x zdqIkQA zOodNJUK(13$SPhA9L3h7bd3rL{ z1}>QfUr6?f$HV>3vIIu>u_zfUYk3sixQ{=dyjyP)*-<>Rl-WpN;Dk@-#=pbd%1u;3 zI}77;buE^c4VC9g#%G%EG`Ky6xkT|SFxAOSJyz1}vVNK+j@;#k@1UGcsw;Np7(&b#e*M}=eAT-#<-voHLR(k94qFB!M`88NHLy&+9NzwOjvB}Dc^j3w*(SZ! 
z$>r%KIZ-I3PZ}Bm!Q#}d$##p4_|J~8xGT$(l(aiTeGJQ`=l@vfn_jb#F&cHx#281d zTV%aw&vzZvj?=#Pz9;X6=dy%dptg@S3bVx_!D5ioU43vZt5prXDPW-JTi^nY1 zduhn)cB})E7hrmc9eMY`%JodPjoov$CC*+P+7*}y&>@`DE7s{&`FQyYe25|qj*sh9 z`FJE?gKs#H-I-fS?fs&SLeXwLh5ls;$cD%L*3U**Whf>~YD1+`W=9V*;xM(IzwO*e z5MUNS69f8NQ{#1e#Q3Xh6%5qWu9#MPj#Ad)f=maFvUlyYhEMJz?Iq`e5U>r05PT={ zY;$ziZ&6YieT26!PTJ8DTg}E9DJf`ZDi)aZ|ImzJ-&8H8OCe&{N{F(&_|`l68AV9K z`~xF-A~F}$=&>=4Ma;DphRLhaC{9z&_a8s{jIhivFePR;dFWJ_8IM9Zz|%DwRQ82> zCe+sOMnYGIms+(lz9Zl|Sa;r}br;K=ZJ0JD-|iR3+2yX$xlGI`GTSN8mrKM~RL|3X zG_wFXTFzjlE>t6VXMfQK`6U;3x__y~qE~{gTXQ!hR#rM?njmwN_Z2jIP4C2BjheDf zalH&D&klP1KAXgJF~~+CJg&m&o}=_;*qPijdrEQ7hcGCywgBAV$TK6Sw>h7P=gNk% z#D$2sT8pYK`jcq*lw`tuvb?1HFJMKX*X<@bK2UUBR@ee3AC=bTM_FA2tCz0^D~h8n zsy7B*rI`Q5Y|MjxWxFU%rvEqlmp#5&#T3nOLuCGlU_i;MYLE!O`|@%;cLx>55t=*F z+@g(5+4YKAzx8%8V?-)@s_?{a?dL(3TLtE+C1+^cG50=E0P$`2?F%HXIh1-29v^_q zj9;xJ(r~x;A_M8}__gSs*rOSlQn#wL2)l6EuZJJqaCQs}m^$LnQyPn6@6YLprz!j< za9!FrVMslV2|VmfHJ*7mA}bAvQj!Ffw$~> z+aXTVb@q9_-aO<6ux|$DeWb~l;!U;xqWp%Qmg{M48sE^Bb!>@J1j0( znVzA#l=qu0x16mf!IOJL2%$BYL0u9h^BQ-RcTXNbY{Pokw}^jmrd{%i+D;ioXf6as zeF*`8h>S;x7i0qNZ0&Y*sA!Z2-$70HnrdRKelU?9)CqTQaP-o)kaPj?`n$1??|{_* zOkn+g^jmK&{duW1DX6-u<$$m5@lp(vzdVKw=p6S*o}D;aAgjr-;;Zedm*W?oavRyS zkxd4}w%V0#mO$C&k|hZk>BpO`iZ^Preg+8VGqsXjpc#<!dv!hWLF=PxZdsvP zxxdjp(oJ3Btv>~>HJNW8_X1;AW_8enh_2;GL)Qg_}dl$aoik?y6oCZzkgwBS*tGN zWq+e*&En@~`5T(W>VhE4hw~R=61r!`UueU#prxGCMG;es6dM89yOkjb&yJZH7VozX zVLHwAe~4XeGZPTi^}Wh17IOhOGCjMjKw)u&4C%B{QR?7qyNcjq6a!|;a;*%xrrnoE z1R+Y;N?E#XR^d2E!kOh_OiW#%WJ2jY=zV-3Pk?Y)SxRfFw#Qd8OgD#7X&simU$O}k ztavikwkFOkJb}D(UL+LR{l9Tfa<9Xskn%CEpK<|yb z%cMqs@~)iOIKvItCbOF!ze=7RLYtlAbcCqF6C_>QTRWvKC+4o)xaId{{bn_ZG!=^P zQXiZ4>vslir3*HSg}h)<98;`<#-iudnoVrEV}&l}KBd$H)By4W%;gCtY2xILTO{(G z9V!@4%}`SUgPL-~&e%&+$%f&=yG0(qIrl{3NbXKur)g?Kp-3=zf>Z9a=H_d(DS zW{09il11yfqvVbxD5jM)p55zRGO=cs@-E$WRZAkyq?Qj)jt)IJ23P}UGJhzH4yw0n zFTkb~RtJjie>}l_V9)#iXa|Ts%no$j^;Rcysx-s_n7VHaF)|0PPY_l2Cx4I&vp#G{p!F-iaeM|p}i^0f+VJ;eAR^MA{7~hUf+n)w> 
zh%sR>=|pTNdh`MV6sAw#d=>!&pErXCTY{uBricm=D+SU5939lkdQBS;liLVrnqB$~ zzKbZf-|0#iTIkJ|ml#9Ku;9lgs3Jh!{H34?MzMCMmKb@AaslO7un~1lx=N72_QfSF-e(t>6VS4+W?n1q(M(FE1yW)@S&9g@Z(#V-pv60ZT`MAxOH1}X9w(ma~ltK zkz#Rj)1Mh_edt51gJ#ui4Qe}LO7xfO^nbb8e|5bktt7}8veHbS7PmFrPDwMYzg#oD z{Lwx7k}B9bM2~mY!bil`bjC!SAJR1_Dk+ZHH)|V*jx}sXbcqXgjzbeuA6Y9<>z#z+ z7MqccdbWm3uQA?w{w!jxr?2)TC@k+@Q$y0t3O?O=FdV#OyJ8_AAnBj9XV8gf_yQd@ z%R_=3DvPA=X_y+F`_&ig=$vy}g}w=g!@oUhZ<;9NF6$rY)g8RbvX5A=)2Uuc{bJ)| z3R4)pNbC2EX-CC2v$4V$QHj`DHBOdY4wP0&XB&K^m@Lrevl@k5ZUhYnzRMnI_(uU_ z@tD_)%qc|;D#R?BLMOi&*m64}_$~f?P?)!mPk2_=r-6aW%F3{tgnpmdy~IoCj9N^lB3VLA*FFw0(l*lnVV+3&PuyJ2b3Y6J5D3U-^fXYjp#seSEaJ3C4sJw-vVrNw4Te&sQ3yZO^Uu;)9 zAkoki_0WebPq)Mm zw+dv!g$ix$!6Ns)bY*BcT7ZM_{lF+b{i`78Eb8@*2I$7x&9J_L``(FQCsZ~pt=&-8 zG3lSxqc|&->?wL5IhbRcDU0iflJtJaQj!lH%($2=@U{waSqxXb4(*mqoC)0Kv$IT_ zH42b{pfk^m2oIPrpCCrr%~aU;QZ;NEUyZo=Q;d*}OY7w|xnBguX2i_6SF^j4cVcUC zv0Jt5!Qceh(W-p@r{;o=&uqS_n}>nW4lJtR_ALgm8xVgJ41(Ks+NeR zFZ%UML6MR>1F+!~eh~zeOWoDxRGOcFEhzbap?;!mA_I)N(-f*5Wa#spDGU z3Fh>CdOyuNEHay*mGr@ibE_<_HH|RnnIE%xeQVGbp`_E%d85PA&_le>1J6Q4qFrlO z!Jy`liFaRU{Z2CxW_RXVTxvObOq4^VXYFw!B#RgsBjQ~TIFn&jR?QX;zqz@Wl1F1YlWBeEWsWBJj=nNkCOvK(k4cYPWYD_ot+aYV;7X+7 zI7P6x_gGy+_g3`nI=j7Lw=`%1U8VKSmuoph_9!QjQ8bFKc-wOX<~lSTM5Q+9W4wZ7mwpdC{~$5n#h%3)AK*U6)o} zdv&9DlP<~!DQE7Cq`u!{4>sRzV+;O50eO70dc@yf?>A4@&M&v|J)0Wz{s=8dMZ5Sli6wZCTqbg1 z?BgTW7>b_5IMlM(w#gCOTmjKko*bhE9Ko4htrr(dK@$AH!&{6=he+0th5;bg-KOZ98*t1i7d(5%nP=ag3FOAMZl+T8U$4nc->{a?L;C>flNRi zplitg`cJtJq_-!%{+56LU%uB5P9$3L+j40a9^aH9M%4`By43^kv@=3>r~GEIdz;(n zz;r8t0AeUIenpCf&ek_ zno^0AIi3)fg&{*e~y@EJqFwi!ipU__DEJ#qQ-16{S z|DA|a*G?q5O0iV7i(~(D6kl4E{cEYy_BBE@==cV8lj#gjFUXbf@>n=b zEJMbnZqy}v!6f+6%(8<2Y$UwDAFi~=Q&>wt8FfXri$1iOoABPdws zqp4Fuq@c@$;J8b5){re~y#^Ji-qxefjCD`a#-j2dMgkCus)7Z(^5Cq6TAati zYguGLr0DXY_ihR{LPF?m(?y&>3v5>+k&z4QeFnt0fC_ghUBafT%Md?QuNKo zai}G~GY-WHamRcpCBiEB4Trm4q!Nr~*^ zn{_>80{RM3`+JWeo5c%fb2krHP5;I@y)#h8>^)rSvV5H%^C7XhAmhoBj5M!dO?hl$ zBhL6Wfz5breR5*QV5vhDWmnw!$bGnYcIl3ZV_e{T-vLP3{=%$yj=& 
z!hNZ)8~fzwbtamRjIC`6b?s-EeiS)RguQhYmDf~jz_070-W;*v0~f)4uGx0kp^UC( zaV1p7ZL9Avn-3J>yfU*yk<412vaUdwZ9eQmInrKOwXeEw=uU<1nQMO#CX6;7sFxUt z)8iQE_Z#0y9AJzaDR?kku5*h$-zv*Ogs2TwOZ{9C6Ukjz7SmxEw^}zuoBQPlZl9PuT?ut@#>I4jtKjOCkMqHdziOPd>sSE(3jidh}P9 z&>ODr9aGYG!0lOlqs;yTgX-HLYii(20Dr>&;*%fYezh literal 0 HcmV?d00001 diff --git a/docs/images/mqc_fastqc_quality.png b/docs/images/mqc_fastqc_quality.png new file mode 100755 index 0000000000000000000000000000000000000000..a4b89bf56ab2ba88cab87841916eb680a816deae GIT binary patch literal 55769 zcmeFZRal$t)-Fn+z*nS{Vx>rm6qiDAOL2F1cMtAuDNvx0;#Q!zyE_zjcbDMqmSlzR zn{)pEI@tSUUwdu2)&Y>bJb7fuJ?=5a1EER^lGqq;F_4guu%)HMRFIHRN0E?_z5hZ+ zJaJ}X&O!Wm=At4gf>b&}x`%l4+)`Lx7zwEYjQMDcig^FRNlM!V3F)=#)7P^V3xFpQ z(!7JTn6R3s!6EcTteK|QPPjx@DDOv5T2*CXB}Z%z@|SP-DsObzPh`FaVcdV&m0)j; zcZ>LN@}*RhsyUw6to^1IV&KrBgSL*D84<+V=b92tLUGmkCzrla{Dr!*h^X~IGAQjM zyD9lfz=>mTe@ql{QdCq_QdAt=(BA&2YBUsY=dfzD{{p(Xxaz)h;YCF8?Ul%1e}5}@ zO@0yZuh)nND%kn8|Na%lH#NLM=KqYOnC|MbCw}whr}=*yP7H-Y`-r9qwQ2rq9Dz|0 zBdN65Kl4A$DgS>m=QkV7|7=EzGh^Yu&HaDh$NCi3wnS$c$@$FVUp#HFss7?l0LJ~{ z!`SL7tNPPP=8^Kq8)3(i@(qbit!IaRj$Duu3h(VXaI4Sdu3~_@H&ak|A1shtFJP;$ z&Ff|ziaT$FS{aiU@Te#m;Cp!+I*IbJ@XxAqIeeeH<$>FQ&-YdyTH@a_&X?%>7*prF zp2!e%;=M(CLssc(k6U1h(+Z6N7fk4b1$pU zx+k}@k}uu*?&UWT+g}Y#gV?3_XQkIe!hs%Suq9Q))|Tlh`Wr-J#)v6)bNt9IQZ-?zd%Hw*=ZrCzD^f-D3r^0KBi$+ip$`A6Mk<3rtrZFNxAf zKk90T99Gb#t7ndaGJ(*jcpaOR-2zFV|0MH`0H4>cX|8kH-A>yB@PzO5QPgAAeG<9~ z(7IdVikhJ^RFhx&6*~Cd*30U>;FKs>ES%nYuI$%8RM=1({ChUX}X7!Wu zAA=&In$O5ezi+pM8LtJ8`oW`oa28+E!&*f>9{W97;k4XXkIS^H4+UAGvZx7D{UOIK zH$}ZEkpj2NC%)GxA>My-R{)`xdTyO1fcg{J)!T^@lJhkw=vrQzj&$^Qa(I7Cu2xl- zg5af(2k=sEQGeBmBNF1c9B_MFCIG7eR|`T^)>Jws({-d$>S9rNoIs$o1qKW1U(s7gPai5(qrX(&Um zwy;AI@AZ}{%d9#&PBP>zwc8=%jgWWGH2jQp`DWYPw4k^T`^Nvelzg_m4tOygvshAx zSic)*_56B2$iwR{sdtKA-$NW8Cffewvz4#abf1JwCg*y2X*Lu~6edkmydt&um&!Yh;0Fgz!I z8S zXW#cIlDgIR7Kgd*mV>IL1+VdR*KujmVe6Bnrwi2`nyj5h(N`umHB#h26X zt}BBFa)TAfq5C^R?mPC5nk4!GljuO$+PG#|*B4a_2>^!?m-qb{I`I10^!40&Ah?Xo 
z5pt;rAZdrM_}>Q86li@(J8)D#f?(9Br`@U}FA1>Jx%%}~}bmH|q8K|Y!jaNAu?dYM~6 zRZJc^eBV;Y!Mnx?kn&2<<#2q|Pp)+P>ZBPmqA2KkX?Et2s&9LqBzZimIWVsmGYatA zRXt~RY=fjB;A5x~rSrZ2e#S!_7>vCGqC{9lj*|V8LTb}g!H@mpp{+Rn_v>x&(6H+J z7}nKf@B4Ld%Z-a7|M0=og<;D>XSx@Y&lV$4Ekin}o2SXK^<>^M{r+%K-I&?XE$nJSn(xJK4qrH|bnqfPU>4jm=e=x!oc#?Jke&g(g- zUucQtw<$SVY?d~P}!t-c2Lo8mx6d`@70 zvP5TBSUX%%C7-WOwciMN4WbKqP5B%ow3f{Z-jx6kgNKYV|^tpbL^<*qZ-A^30n?FBY*Hn_q~jp%0Mg-<>UCF!!;rL{!Y{b z*3Cv>f1?;licgf`G`bG-zLl-3R|wc#Q538g0z$S#C86oCbHSjNy?ANChiOIVH2rMI zG5nGlT3Axtm$CYA3AoOV^jpuMy|ROZ?T(T^1UI_*!$t2I@DM>^@!2%tQ*2Px;zGGh z02fo5-BK-N3cz|cST76mXYkO_egPK}#MwY7cUixalk{5k7n=LGIBj3hTJKhyeXzl~ zGo3fkBcT7$3Q6oSx65M@pbZ+YC;(b=HY>1%!!mZp6Fqznq0rpI#0pXZU|dVnIlk9-%u>~`h}VhYjz zmPod{6t5ndj-zKD=!WOo(!>9dq!*2ld8_8dca!LG1x9m|yPCUXkoxbbV)V`B^QlP* z2QLUMxOI2m3%(x6c>7K);Oa-%C(!K#N~N9Ef%3qRq9J)~x4KpV>itdW?%7A43LDIa z8X^^jrZk!ojDyDSMXww70zLApJntoe%=xcBD#D>RDy64nfaU_M6Z)d7V4v3O7+UfM zI23&xL2-PqOi$oj<6nQBorePGYWBHH+x}3PF;m>1({p~`Te}(*tYP8JcKw|ZaIa3W z5|KeaW+a1}*~V9jOh9(L$~YKYYcNd}*`l$FOU6yA(HR-(cSZ&9*~&v1R}oErionDF zkmE|SIb~(H=VJ$DZ4b&-CQ)fO@a_a4)*zSnmv493+6k&S(%z0p_QJ>psX^O_V9lhrb>BAr9 z#!w93wGILaXkvaRP39@H;n)|GB8ih{1e-l>kB{FBn1qGHL%+#NzbvY3$Xf&5Ir5z2 zPG9!I*3-qPiSN%$8O#PHBV)1VD}P1)O~7Dhj2?72@pBcduzphsN8H)`k=p3Wh%;_$ zOeXLMp7o@Qaw@rwstN}`?{)X08s5C`DQlRw*eDrX7{@P}7d8#NUz6uvKJSkcQF?Ne z6pViyWiT|=e=Doa?LjcWpUG)555Bnx)chgcgWJ97&2EQZf!xal z)p2nI02nbGF^RF>u>$hlk&33=WQ-^JoI>Si0u8 zV07Zbz#>r^qAXD{lBu!00RKml^p=Cv64=~UMF`M+kogAK za9tvbFb_5Czmu~*!Wcf7X4}nlOhFn>z@2UYs5e8zXiDYQ=Ox))S3>&zy2o(u2h5!JvYvSsLq$lAJ%%c;J%Lb@e5mEkCW z?eZ|Dux0i&Si?wGLD+e^#G`KKbCx{u6gsr?6jUM?pE*3wAGiPuHc1MIvY4|WVosn|)%172v_ zuJ9qyLTdW=-$|n#8!G@V$$7Z3oifYzxs!m`vv;S}RV*&e|L#YrvkJalcR(jP&|ivp zdX?VXKmoSP&tSH<4&P*Xc=vJz77}8-1B8!d0cW#BxWLd8o=iJfUfU`0+(QVsx$4{8 zM%dD+!cq1`U^-K(q~!|)T~eLAZia5FB+I+)`mCM=ATeKEa>FyeeU0P0N(2$?H5_a% z1c?1K;t}s!d86fx%Dsml&FIN>)%>u!tJSay-_BD*KV3b8rOY0MRDF}8&W3rMO8Cvd zq4No{`UQOiAyeW&=;8TZg&{D6<%2^Z z!|qE6iY8+BPguq9y#O>n~H+h-giBAsF%%~f&;2z 
zHSJ9+elB|j$&@GebI=dtreMMQ&ghri{%!G?7SS%=%2G0KqHH#RkD(za3ny=Hi$(=p zLGvS3B|d!WGOoC}J8#If=~Y0uQMxBB0Dao47Ri8W79ysyRyY66Fcmx+Tm-DB zhy25cx=95+#qc?ToUlOnSSf2{HM2o=*VzYQSjU+-RrVoQq-g{FF4Zg zE~D2d*8doXY~?Q)$%+d%R^R5T*Ja|j(efj$qMbfNU$|`D4f(?#^kdi{t)k*vJRUdL zlxcwb4m#}66CTp`2n9CPSQhv#x;!Mn5l~6yO6GGaT9+UCvj-#Cg^PfUgy(9?6bFXL zpNb`ZMW&HB#=RloUUl{4T*WAYN0#{>9S=giO>#Fy+5dV^K*r~FnE~_`y9;cG`R|Z< zoOm=C`0i!|j9q)!?A~%82Uz7BM!4{L-9s2&lDz;lp6G%f*Hh2|EjuF*ZTdWkb~fij z6_P^E5528|&KH1y9o-vpP$5xCn_I}+iK{MC;6&BY+8Fs=m!-n;b%SD?b{UHjMD=vl z=|HehRp36=l!l{Nb=j)%E)c-p>$yu+7f<0NCv?~F0Cqtaf)`7bVV&u>BhZse9N&i(A3$x{)K4e9C)`q;|M{`52%Ol-Fg#F@RhIVC{{nI!7gqddBASWD!btp-(BBw zy3b`l5s_nR2<)6q^Y+vd*eWbZ{zSIO{;S}l*pU8|lJn$|PvBuKUqx7+=-R09e`&ej zfx{|HP3Z%AGj5jsR!`dCO19@yQ~>yvW;*!(X7#4zWHpB}1(BEfJf?t!{10!5-z-JJ zQX-eGqE>l9_7%!}cZXT{YORv&H@6?!P^VBI%uu6V6=U2bfK z-nUhXzIRgAtSRD^1sRqBr@J>`*yP8cp7G0o-9a4q`1%ZFqkHR25(W(nc!>F8Rev?+ z2p#E#0X>$-*t{U__3WWm|LRC(^ku5R)_I#q+`)twhDXu$zH2tK)}SV;F#zE0@2 zg?0JR?v@D90Hrb{11&%10Dztc$r&o2>~^QX>Hg!vk;( z#!o$oW+d2aJ3E!HTRLmi#ku04&fiTkl>~TQ=DSMO6nU&V@0^f&T|`G#xX*^A`Jd~q zJ}%Ne)$q(Ccl0IwAN0|Wt_{zb<)PfG{R#-xbxpIXTB^TSg|zin6u zSh5q{v1O+fzBxjo@#?QW1SARF$04v2_)CFv*=aWK_yOuc#x(QJ=Ett;&FUqs;sfxq zCIB|&O^N=5HrZJJV02Sr(xjsQLk19jeTIiI@V|PQ~{$B-zwT*x3pGviT$60%8 zCF!>divF-$D){m87X$&aRcy6G_WdbycC+L(o9?%>1B5-W24q|AHU&J)RiTV0+o^D# zT@WW6EHpXfOd)pp&5q{s?`;3C`S)0Y*FJT?+vbC9;6s04-B?QK(}F_(bAgv9`a9z3 z6M28iWc~@r|2+7AU-9?vZT>GSHUD2*%^6Xwe{?i5`rX!MSZEWDhZAtQj+cwo7%6a? 
zSLc=zv`#AoZy(3i_dRGaga;nDKI!IPS|BN(j!XSr`)E`qYOKB0Wf*X2oba7V#{I5) zk=%1laIo%)G5j-l9>dPfyf>2it=GmbYZG{h1;(^o*K*Rh-V5gQHTu_th|#qnsfD#z z@N=S0eaEKKL8ivW8}}v!0nvu1qUJx#E)FXw=}JTjohk=?^dIb7E2n>IU)7z^yXKN5>F_agCUG}=!;#J&CZeBX*c`T6-#zh=YC zndemokzv74zo3(!G~OKC6xP?%!8h!~ZNg_vh8nM8JRn4`F)hCQXDep(R~_D}48xI{ zy4B6+;dRhGlsf5MLde2Kp_-kt&0xj4>3R zhquhEz2pj?@1^q#2>W9fj)Lo|e>Qu;f1NoyY^u>Q{MwRUOwH>_4=8z=h;cgr9=^=* z?xGoVzo&BQKig6XySlGE%#IRELH|3M`R8%$1||7_>z7ob{BH;Pi(>l!kOxD5aw~vz80WD^z{{}CSKKBaMsdz*X zg6)>mlPEl1p-B3iKpQu{PzB-uPdhWO{u5Cs7TY70bf2c^q^bito#+l%nrww;wH*q9 z9^AY$9%^s&xgT$p@9X{}TC>IZXEuYUIBot@Zd+L=dt8Ib>xM9s`UCq}w*sdfH-c>$0J>4`lZ*J!KJWf!Y{KJ18 zO*eu+eRMMb1qB7s`&Lme!UCS%p^vnj9Q2HvZ-t@@!T%j}87W(a>}+UdXigJcB$4Fw!o$e+tk>*3^i~SJOF4C(3^hQo`+k zUHc7b-*l>D~O}$@DWtwNsB+WB=I-1wY3B z)aL(26^f6bcMLQ!gU#$v8OoT`dO;}%ZkQ@+oL)F*{Gtk~zA0_h*@O(Wo!zyFkK)04I`B2uMsXC_I zU!z7c!RhYhJk8D~`gE!0=iP>pQ1&?a zB!)_?vR+2ekCH#{3X(;%F)T=$KuNw;e-z^P__rCKy7~zHo4Nd6PA>hsiCK;Rkg$~!x* z1oZ}mhF_&o*#{n_Gl6O4`E5MaZ`8*?L(y-2KH65;x&P}1M}c~Nt(r)Z&EUbuGWgb` zq7h*-WJ2sQ%Gao%mg#yU&%gCFZGLyHw3wSiqxS1=ra7 zhfVM<(E_q=xL(ERoMH|F6v6KtK8Lk~#`=qi2h8)gZN zpyUxJ+PA&F!GFW~&t>#~6y)_7(HpW8GA#0Jj)JnO8cp|o$d$>=w7`eLBf~3W4w@?I z3W{(h>8dd`6ru&FGa6{(H&J8WF#<6i9@Pa!~XE?j?N_|er(s~ zoQnPL+2qvYPfp!VWX_=|XJ`LT_K`)B)Hpg6`5Jj1h*XuWGaakV^^5GAL8 z1<+W`_)7+Y9;rgWz7UMAb3^H0$qF~P}9YX$|(l68N)eOTs+-Qe#c_pox#H>9Hd=PVCb?037 zc_zYv+uwJQsXssy&e|r6osX(3gtZO%F+;}1ED_{DN(OKVGEW(OEgOHy`z;Y7edqUg zys_WA|GWh3p==edvj;U(>@0s)K za$RXeodzH`gT9(d)4eY`^}kKtGx+twpn!(!VK&>E+`yXpuh(v|Wpi(xTH=d7h;v5M zR!OVLI0!YPL@|EdV)~92GWb13R$pt`GEOT?Qb3x8FL#*Qs?^3PjDp30bwiH;|K&TnmI{XS_VTuIA^Xnk) zsnw>~BEwGBj$xwjGp_8r=GxpTbLY>4v$JC!E~~?Hz8N?^Ndu^6cq%-o7f>+JKkXTPIu#nTp1%Bf8oJEn+~#k zN$lGfo=h(}gTm<=NmRx#HWubhurWa9!z_j0mirhQKozcX)o-MCKS+U+)JmbYr=O&@ zqxm_+j`#c2m5$2FzBZCB1j*|si#Xvy3^!Fg04#vUxMh?he_JB87X1Pu^@Js}Al%lvRC}tTS?07wM`*eC|2fyacbu0nu1^PZ>k4AuS6p2pa8h}3!lXb z7r_gjW1#8@siJi4P7|_X)OLVfrXKQ1D=O4MjItz#=B=8o?40SD-1vq-P6EOgSr>U~Z9S?C>u(HvJCbLw4qC 
ztop8mY8GXcZ~_~n((s%NJy11JVUEbad`sQH;>i#eZ%GutbswFi`1%Pt)KH$zcr%DNDbV>DfG#DbOi8HOuFJpN&gT2;Iw>eOv}O#o z4R?4w{O&%K5Vb8@eB}{yeS>?T6RABQWkJM`{;QZIfGnGhyGq@IV*-6knvpw|-p9>L z8_Al3s`00QS`2aOB3S!KJ6PoClJHk*^e<9Ad|2h$i@?&-W7MU;?%kal^yz-r<+G^1 z3ePEaFu4kt4B8S>_b4Tog*3~bz8YIp2aKD9eM`&~kMoKBWiRy9>3*ex{3JikcJ}Fb z%F|>X-1Il#2ykyN?PknmKS5VQ>R)oG6|@i!HKt@e_*{`e6InENts%!y^}F{k;`8W< zOrqN3znhy>Y9D=`Y^b~%VAL%YTfa)04G_FL@T75=u?EDHHkKYcahGyN8oqe$#fkN- zL8ZX;gEHG~1>0NUj1-Y$rY3Fo=O%*5W=W@_?&iwRXu`HWXo{>Xyp@Hhxe!iZ?z&aD z4#nffwZ_Qzzrns#X;7I)Zjo{zoMhLa+xqy$Lg_DE<4d}V4`)a2&!Cd8UrIb`$7hQ~ z=rk3pL_>uShe-#nDQLLow4nimpL(^LXX95){J{Vs+#}lAx7hhMZKMAmM z@F@}Uj3|<`r$;{V-DHE@vA-qpGrh)EZ5nLHWL(KsXXqLi6M2tSeldQ*-*^A#+2(TN zh$e0D&p8p<0o2}CZ?Hhg*9_EEM8poNPOG1Aa2MN4ah2O+F;TTtw>uGr!H)Gh>J2rH zXFLlZh85r9yE4=+UxGnHePi3;6^A7(&UUa7E_@yVU?4Y_-Fl<@d%Quv-C`T%DQ|3``&(L^MPUn-q&sCZ zIsW1CvgOQcUB>3?@6N76^$4n~f@AH|@$r9Ikk}0E6n$%+>4bIhw}NC?o0k^zHGQCq zxp%a2gBW2V&eD+hK-KcNgv_rD{9j9$3M3nTudV&qOyVhqdTQ*bNTlgAZR#YREPi=I zfkqQU1+uZ!r~ zapTZw$fVK7r9vJg-B@Ml62+w5DO-4xdbOHw%~CT+&0R2hKK6+*aN;}#xCcXC8`-rj z#;6lm-Bt>#;*zI)V_WakvCNkFRBe|M;i6nIt8_Sqf)GD$y4Ebet;_EQ-h36+-}Hwi z*G}Fgdp~G<3==(#xp-|EIBy&Mupf-xtXVY1eM0f9a^eqffibJ*| zFeh(6S1byR5ldEw}h82UX3!s5W0g3eUd%q+f2x+?Q9?AJ$OF(NzRM^O0ul)+F&srRw4rpP9NNM zC+6g5Exi}AgJU;t`_6WH(mrCoZ3b*c%ri})d9Ihd2^NoS7gwNk za5jd{cQ*6X&O$wBl|Mpu%G zfG|V3AiCEMp;(0hIdu;xI$DRF-Q+5CzoEklgGPL8%wa`qXo-C(ae{e2;oprIn(;Y@Rg$=FML#BVB8#k+Rsl+tItuyeq~L*%@f2v&d2@{8TD zM4U=vKs?;y0D1T4AlMAjt@pZ4y~b5b@2%c%N=e{S-}#nshr*)&pdIT`hWpYx&!zQe zjQd!}?*!y1TmKrsOhSFkV0&vQpSUeJ3^??Yn_vhJE!C@OqdrT8p(8U?oK zh4%j8J@{vmM&n5g*a{t_Z9=H#&%@^O?8k?dY_{BgDp+AGs7eel>=}gdqYj%0RVi$( zsT+LAc6Q%axVf$PzQhzC+57B3hfK@;tUU~41cfVo{!Kj}NUffe)J3ZeQ!*z(w z>Yf&dPaI1$fq6}(4-q#NuR(Tjuk+8QT?>!Z%}?WO-j#B?w@`gzPQ`$y$X_?XzFGTR zq4hP-)!S%(Z9A9kK-iSIk7=8q-+i=TuFWi-ym*_>eUoPt=U@$W&Du0xolIbxFcuds z4|Sb9PnETL$71WkID^fx}bZ->Qs>AzZ!# z)c%0bGRnt2(({R^w`7S zQ7`JPVihS~JElzLcg&Jdd}{iZFO;O*+4PfZg117qLHd0iCL@#g)Gf`g%DXKUr@=Yy zaQwqceMb;fi5;K|T|B 
z`ANT$P7xM#`E`EtzTje-z>i*~rOcq&w0y=+5+UNB=7_ZR+xavh$!gMiy9+D2V)I5) zXmTO4S339dDqho((|)vpY7L~`^o1fNL?K(C>SAW7+0tP}5O6WnD~RdrArPuwYBrFn z0t9YDTYbmUanM0m#&K`|H1tT-76<{b^1V|*ZWLDqsJ;U0k+kIi?txp3rqAApczcKB zo-dSweIHV#%4W#2=aTn${B1Sv+UK<<0kN}qKR$ZB4bCuBx0k6_9x~vVoKV+ z&(}WQ=Jfd5nXXxN3SCvQlpXd}JoI-|b2eC!WgJd}PGeu$0!A_7d^#zIInYxi2_?*Ae@&^G z$PDnH`PPs*7BM*M79tWQTA8;<+CjnjahNS z)TAw}dr@;mwFV9luiSC7%1XKG3xtoE5sB2~ygqfPHmK?D`3S&-UbuAZDCpu%&f(5$ zZ=tm6>C+h!4NRlD7~_9!xK|Rw7kh7$EdN8&O|Q*;*ZCaD z4jJd=S~Xv{DiBm!zi9n!b0}i$`%OoeZgb9z_M07f<{%w$=I`(F7_&6GM`$zITB8MB8N6Ln8`vU|&v^H% zzlI7CK3Iehb#r8caRv?DU*F)1A3F@2*T^{A{zQd`>S=|uUQsZ&KA$%6(}JuU$Osz{88r^rp+Wi2e{`0T9QV1?p4 za~L#5T~1-Vhe|5^Tiu~ICc2J`73V*Tefm#B~4=bveHUwyMjMBL|;cX%8)=8 zoFo#i&)!T+)w-21=sR3;km9s1*flcnP%RDC*F=Tm+O94aEg_pD%leF8vta2*Az+P5 zADCIRacf?WQ5yN&B7R1q%5=w5DPM1NI*8FkNSjOkOD-biO1n=>Yb5tgEnr6RP3U8p z5Y3K}dS=;@c)-P$KCeSaK>{xIyvtA`@hFg}FUHmS*FTS48)2aw_y`Ge$ znPdOp^4YsOOpB;eHiXpO*`L}sIyT{J3b~>{{`Hm*>q&-6fwqLN*}Hm*SJZr0npYDr z?=PMOu;BO2GP-?w@jR;0&XjsqFWugHNL(Ya_7gUH7>j4_c5%P9E#H1=OZjV-#{l0u_)~I>-0fUVyiYkdf9XWUa zM1Xd3e6i;hJ1jx+30m4J7u2Est`0T%J8*(f$K%%KjgCZsHvMO3bvqCnPh3H|?xQma z4rSbdWu=z(`9a-Vy*y?Xf&ekh=h1@{dte9L4d-_~uQ60YMb*`Oc8Afv+%Yp?VF6=U zBVxaZSM8}7nHB{T5Ec5;B(df4+%q?_-G3OE5S=3EkUl8VV4L_ckv;LF(c9jrKJ0u# zcUAY~BU|YBk+VVlfiscRFj_~_Mj8R6yWmfL^BTYEytrmUr|}&luY{yq2gBhj`^c5Z z^S(cSkrU0?2?&(}>)0c{^rSVWrQMSY%$yc?UR!hrcSNmq+0&B!svJ0?5C~GA8}c>6 zj3N{*t4OCfKpu_^evK+tV7fprL3p;sL9(|iBI7Pia)v6MwpCc}&x=Mz?g403Xl<e;viOll%5G z0F13z2bFa2Hzg%Djq*8s(f={4DAR z_VYbC*mT3k8^YwXI%jshm2GBx>{5ieUdx1_gq9OvdT$5b@dmgLq=((RU{ZK6<-f+T zm}DK>i(S6*_7hf2xOTX|1-7HO4%Lop@E&^79{! 
z@9zg?%&B$Nbb{u$4&`iUl7ECne{W^Zt*<`qAxIkdiPu5@9OKNSobC�)v~C(0C)c zgd3@mu<_@wnt>uVJydQ~oz|jKOy0;^`Z?+o2D0^+hp!@j_=nH5zG^AYBuV|wimv<8 zJ-BGiO^XI}T+0%OK+mPa+&L+!)PYa5H}wL${$XzJBCc;XV=Co{g^!)F^tz?jpNo4b zH_VuCMYaCaZVyd48bC?#x#Q0K4CK%<=X&Zv)V@IQ!g5ZVK?zTp+C(vj*rq zre0*ZTR%sn9`4BUqa`iQwuwP$!iTu9y z*^Aa8nvPt{NV`}cy5l$vTGknczicBgdPa#+$B~_lxB0^l39bW-wL`u?WXo>LbCrxs zHO}TPn@o1wSYvVPGZi62B3}9ADk9<9rEQFD-?ViCJHyk~ulRlQ*z07+ zmqT0+dAd*&o$#ah@3U!@BqPvJ}Ns=MjBuIqf9PCEedGznEA@4tG^@#xdHP z5}hhW*p9vTm8p^F2zoA2iJy%YoUT99TiNM^!6xPDkXY%@^R6F7n4GGx+4V!RemOu` z=Bso5M|O}5LA6BSOdLB#UmR7s1}UL!yoSsl_4aP{66T2X(LM*|9)bk2fjUQG@;XV5 za7g2iD)Klhxr?NUp}g%l7S(du@pSRzjsod24a*3J?<_x#8}8QdV|kf7grum zMHRS^M;MRa{Q64RKHpz0W`#~YUyQ#oG(l?D10Z|E)=~C)c9e1bRQzl_KE8L*d#S4H zGq*7)2eRPeh6YhjH3bvBj1tQl|SyY`C6lvas01T(9PNZJK6 zP3wxPDqmT-KbA4>ntJkBD=r{uh>P2dKe_5iem*i@&Qi7(JIJESfjBKGU&VlMgWXOZ z+grrgAg-ko&vt-qp3qk_{Jyj{S5C8tp_aWI-lcFeqdCorB>t+{;r}X*a{YZ_D7jsx@3ZLF5~Y0 zEmA^FHl-=O@oYTk=b{3)f#6wrVMR^aAFkWt`K!X;*hkOEJ}h?qih1@jUzl5Auc6L~ zxmKdYX`}A(wIiw@Nvhre3EN-J<9T?KI85Pa#lXhN0pxf~!g)YyRJC$%aOPVO z1|N}Vm(EBijEx+5zwlamO7S~iGl_`D(3_AYNv=Tp-B zLfLb!LWW&-P|dCrm$Sp?uU4-Z9Z(L)Y`Z^8vKv;BwSQutkP{9P7Ks==4@J%CYWj*9 zM}5&B_xX$_jmo8fH#TZaygRjP#vD;JIFLu_3CL=zp!gk|koyVmeEXBMat*taN>zb& zg&Kq-YKy~J*#7QCz^h^O!Y`}mn!;bvx)sw2>M`%V$C^-PmWPOs%LdR>R9a zjk<;fPnjUHaeQF}hq2MN56#UAxS3c@3Q9#gOvfR69IJ)f)#IIsnP!H1MzFJ+M~v3H zm2atRwZuz(u=p#QW$W$iOXDKnfSyYt`5~>Wm|Mz|({I|E$#NdL=fer>#3u1y5dSj4 zhbTlcNm<$ZXDm5+&{w;^Vnmq)aShdk!HJ)q1*3!J?c7eue z4Ayl-cd=DH3Kr87G6hlUw+4yt%YStriba0x#%6h8yWB{-wpg`bEXk>vAuT`8CMCZ= z-ET)=GS~U_weHAuj!N8$QxriRCC_$2*OZ)z1s7+y0Y=tKL9QtIwdQO;E))*V`;X)q z!yVh(pIlUb7qE?K#Tiudee6%#>#9!n7viM7$pyuCMEsl%le^k_Q@40@a~s%d)S`(E zEoa4Rt!`>1A*l{oFdqaZ%8$Gp!HH!0fyIoqj-0fBJZJCd=cuTUbI%~>YWI-?Xf_iU z;p(r4yd|!ntJP(HtQYRCvJmF3CM-fcN?4UOu~xNlO#K4l9UutOL;i*TcD40HZNfNZ z48=KpV`9#O&p~l1lqXnxeu_{R(_Fy18x?Do2vyIpfsMNi==h3*DeaW9KFeGKVIEUk zFA=1Sbsa>aOw&?cN(-LAsQGLQI*QKv_J(QxZW9@`w79A$t3iTm_8RU}= zPk1~jn1_ubHVP*Y=ty%DSKZCk_LL+S4BZt3ps?hcWV7U@v&+g|tce!uuT 
zoaf$auXWTi2^OKA6T^5VDK+&=LRZ zh}nwN4f|Wi2H;M29qxDsS1;ds?$L2%vs&=*`}(}x?fu@t5*h?7mkz7o7{o ziz|$({9mgQP|Q^QNr%LsNmqXDY%h(Z4D5=5G#s8mXc;bGXjqNhviHGjue>Uo%4SRF z*bqwj7Nod}m)P&L4UmIEG5T06`^F6ydHyGsz7w|bSdf}FmmV{OAIoAn zvSLZ+%SiQOM*3+%Bp+W1Lg$l}=r{Uk#**4isDECH=%jX5K&c!$Byp5BG?w8J;=YkIeXoqkj znKUFjOl-m^nECRn!;La!Lg$gJIgh_m;Fm}zxFr*;hzA!C9k~v(P>w8rpF(hXh1ovr zzA%Rm`6u4?vDUSNLT~;c9KJVF;WP;$)M+Y!vNGWDe8gda@!UuX;bF}B<-Nf*2T4sj z3>#r!`)cWpK08bL@-hHE@LQROyQGIdK{mv!k;3mAV~Y*& zSx9%5c6=H`R2c<5TZom~S)T3I8*R!KE9Z zGy!Hum?_Ifj#-ah^FhR$lt)QpLd z4Z=r(dZzP@l^;2su|VZMmnmOEH~2N&6&pO_5y1FY{2%~AEy}vnB0qX?;I+BeKcB&f z|5-n=5l=bT!BIq+;RyxX6beD)7x>UAtobc61SA?P_ozwGiB-Aj_c@!Lx0)r0&$Q*; z7-Q3p>Q8fJ@t8ETi=ab%YjAt}qA~>G@Vs;N-`I%rADs}msjm0>eWY*01Gn@It7Gr) zvfk|JHY~V9eI(H5^?}anqY4?%?)Xku8F<& z>_)a|3WD-J7>6{IyHJ7Ny`sr%kPEeFA5=8sz8I;*LW|uf$ijVCB$3K8y`x{FJORg-`CT zC}*oRScJZ^5!az4e_~k*L8Kie5o|%0U=n+}6MSoXJV^q{avZhx_N7Rh6~0qzf$Y&r zdu6)*)REIY#^T(0%7wuvlqQEMvE;#rG+58^o-`ukh`jLP##HQy1~6-E4c@rB3Pqh8 zDUnBX7mjDFaBO-{#bn&eWY$}&K#}-hW>rwhHS7<%)64c=7yoZj1-pKq1+iGlPBJuV zKWWI?fcdcbKl5WJrm2fffh~(~uvkVjp*vVr(~|$L=|8=URvWRpUf6Lsh5vzbQvm?> zx`zl(i*xr!4lxhdG3~Y`Q1gGiOqdro9<4s_DQ8>s)cb318F(RE9jSx=U_oa)!&<@6 zW>xI-V$Y4~$-l&cpIC)?eD<+JdcA$LeW$*9XCE(FnjzJSg_7=*jN^W1@WeUBcjDH4 zDPL7o!srDPfz9aXRG;qPXHjo@CM^=WfXt`E4qzoma*pJ40+uSL4biBj23qPqe)@#A-O+O882J9sS zx^ICqC-ENXg873a)hiL?Yz@}dc-2eO3P(wUqi2Mlig-`}Xn^2<>c-!c)nYA2ANpSM zuX$`hTok?gLtX^Ds38~f)saMV)hGjY49J#-6JXcd)fmPuT>MU&!;gXb^H(>&Zpei{ zD6$?;nhRf>Cl)J|l?%H+@7`H_THjT#q2NZFv}4$jI?{y^AFw)t(<3NOQOC{@uK$`a zoPZm>!1K=HBz(h-CC8)qCeFF)q=Y?4W0+Y>aYM_;Ck3GXj6bx#QiT@aGiN1BTVkl{ z$_soMv^o*z|IS*ibD=5ke1x4mH+90p^=6jL+vCqdmy>bpw>AThce8)=@3y`C^n)S` z2As*5mQq-ZofZMgl3aFv4EY~!kc=DVgPk4%_|XB9(t z&pkSvEgC-Fd2cJ<#I~D^+)wy<2|Dc}KteTsyumg~<4T`RTwO73uT1x6b7?Nz2m-zv zqyOe#?uynui^nat&s)saS#K051fD3HM8_dfRsv_4@!qD$rGwLBE5@Z2j9$ta(Iy%Q zyI?(ek&`*!o}zI)2_mMe+s^6{Ncvh8eAY-1@6{vYFcn>k8*Sfm zy$cr$g*55TbyE3$Y-}MsJmS0A>(>=$`3LA|Pq1!y36T*z%Y;3sBPxQ9<3LzLbMRC2 
z^lI6cc)`I^f-xhbbhyc!6GZwVIRv`9)wSdf+(mLG-yGJyMG40l%UHu-3#%X;qlpQ4 zI#_zNF=lp0{;4(>6BbnpqPK82Py0fT!H1JSM(`6+d>88_BgyPd;`e|gGv!)&v8f|h zKFe}=GlJEsk%FxPR7!jXRBNR>!wcL`rav1Gca&M6@ZFqE% z`4Mh^%VfTB>88(OnS}XjA%!~1TgzdO3p7|7|926;mpc4??7wq26+B<|^nJ2fDzywu zFo?l1EdtXHOpk5ff@z1DS-<$rG(ZFiXuFs|}Y34Kpxiz9w9v)SYh`Qlsa!LK_OFPk$W_-wQcU; zqnMAG5Q$Prs$WQkS8`znPLX==kuQ7CiAW{Rl1k9zUL&)gL2Ky%RI6%ljx`3Lym78HOG_r#NWZ`h;UmT; z8Q;NB(OjT-ypxw`C{7rz=Ah6?Ilf*d)0!r@p+-^-rj8xi z_6SQ&${Rp@207;QK;#<376gviKcGm_O;|y6$pBqF&Tj(sX+L)PBhju%zN5&)Py{q84S1 z!u8GCK6^gp(|xu;h?PPKnUh7Lmhp+RzfjWm!UtOhw9(KveIW^uIn_ z_4XfElclN`*ZUd3r=6|g_*_mCYn{^noi)emliSaY^fz<49-|%;zdlvkVbJWlK+ewK zY*{HA(P$@!lXVkSTpg#-w&~WQVm=nA@QV~tjbwOd-7zb2C?(IOw{6?D(sBB$ncUFf zOE(5xIKJ9Pt&il#NG9BsH`1^QjnQt{9LJsje&!xuc&TL(@ zAuXdsJ#S?ulhXa4ohB~W21ju2HEmn9;Ale><}Dj~ZAt1pw2jd+HpPP}W)J-w1RDseHl7A;l`H-f zBR?QsBau>#e*U!E>9Dp@ArRa{F&#eiGa?C9X0D*u+HD^SnppyBly#h5H*jF%%7=!sw59c9vD zehhfcSO<-^K!2XtS}}-6ld)lbeq<@ttMA$#^BVn6O>T$3LxpcObE-NtEn)SH3DAgsjf%Hy@L@o z>)9|}Njhf6u=~m;LtCH0meC4`1j`X@*Usz5Oj(WAi)jVKP9?vMg6!#`W_aJeyzA9E z8Et=&jhAK;rplBlx~kENNni)V)@4o#6iK~r3DI>TTeDky--t|0k4HK@%pgO9xQ%UD zyh!gX7B7xtM3{)5K!6}U%CGpooZ#bwfJBA8TNJ|w2h=#+HMy)2qAkKu)x~cv^MTR5 zgRFZprT~ARVEa$0VJl_teYh6S_m})2e(B2S7D%gA2}!UY_BEL%&Tpl&tiC2nrB;xd z>BKo49MIQG#xbHH@XVM6HDxXHxI_x8HLWh^aO2<0Q|I4KOH9SCksvdzy{{R;Q_qkt zt6QqxbuiwIc%>4LsbH_z77CuZ(N3Eh{Hjl*tq**sjUxsbL00hB%O`K$_t@x|s{n4T zNd=a$$ae5z7;Rcbu!eQO`0qOBG$j8>tyuBKRunfzdwqI*M)DkXw4BTY9#k;h5lpSc zQ`n|Bngm4zP!!TzK$%?Z-G;AmCHO7HG zJ4a(MJnx8jrjb>P`5nQ+l}d5)GCk*Icu;gi*^oOINvafMb|ZIakvKmN9Bc9!zuX@| z8c!6fcJBtgI}cj%Z*hu}cIGcMT*eEDaRt3viG8Pz`YPlFCsx%E3 ze|0qp+oBM@_a-zIsY9^~(nq26QCP#uvzBLITT-Fz1pxTVGcnL9>X6Hfuvh0pCi`ERa%Md2+UxG~gfM-;9Wc)ekf>K{tXe9Mtf!(RFbeqz0o?=Tkh6Nvrj3gQ`mk*o^N zm!-*o=#C|``9cYa3e9*JN%R@qkelPrEPd#e)szjS?u45l-g~tSiv;RefFk~@$ll69Yelw0B?`5LzC;tmCJSyx_+HqT%Gc-2 zhqa7V;q8X$f6QtH%hylOT@X$Mzo#h71A{SUK$?cZ-d!_6boCTtWx6T|zRb+Ik5lZx zC5dG%G$-g=G*YM6F_`aAlH>GIDIqE;_y7oJh498JT}+&LXR4d;+c`H(r3h&!=?z9x 
z4Q9TKSxmY$n+qmpaZ(L5^RA7HmY@KNAqINP#5>dVozR%cDNn*ch4az#C??EvxggEz zsSOE4zWxw3&F#htFngbgdsT{RM~3V7uK!%; zSN!T%2CcRzG~5cBOfItKldRJy+p^9QA@i?}dZ znE+cDmfM=j?ciR(FH$XL?toJf-0P#?``x(7+V%+5_T&Q}4ryu>>On>|O2>w&hEpt* z5)Q%Yc&uncx(~56ht=CiOPu^_jEY%zk8Kpx8pu5Vbwy1^yuRo6Z{#hTke{V6p)&Tv=g`ZHv@IDp| z9-YRIOoK7?Vhu_H48|kcl8_9){<@Y7i_RF`qbV6-7s>n$_Pk7Q+O8Ny@3HclM47Ac z6zq|t>*>*jzQ1Q3l^j2@k0ZK+I`N0qp{^YV!oBYzZE5 zSvR>;F(^9oMiSA@_%a>wFdl#lN12STlFn`{Qmaf}rDn#9RS6j!Q3~}X zj=UMxLXAIWT*~kt-mDJCc)Cpz=ibFBQnyK#3pFG)Am4l|0PbQn#eT`Vij|AEU5G%h z$?8@IdZ=eNwR^{eh9<;Pjkqg_&CZ`Hvor z^fGvd$l6WXOdtBDp6J#m__((+#YK7r9MVZZf^jwc^VldYv>MnCwxEHmjCA-@!jTj?aPs5l^liizJ(^&FE1FpZ{Ym2#`r~ z3$WnCaEA?+aPxO%`B{1|`gSd*Ka{eb%NZ?ZKVE^@Xr40xBKY^cL=YK*9#^7FK>)h( zQSI76fgkV{B@bpHxC!faVCy9_0+fD8)Zyl>Oz5wZTeI&x21V>$btPM->8wm90k^yf zdoyGD<+a&Jz#pF3h!1alyPUX(tHDr~S87UyD+l>$24NU?oQO9D4|DnM<<{P-5v z0EfE~)@KAjemmaKTCM0`k3tG8krF!R2_~LbrBR2%teCVPh=veVmQB9mWCw` zRBgo9P5Zjdo9INN96~`85TLimeAWEwn27-7gW?#U5e%o(cE$*1-b}L?*H}@0i!8#D z>Uo|PP&r6F`v|C&?si$#j^150fj%x~5ONvfry{1>s%V^z?BIVI6%;awoqIAAE+1r% zr%okZN!tCI+p9joS~>M{6SzZ;3?!2Dhs9X!)6EG?W`;1=K2r-_=(Wi~M!Bb|OgmT_ z`2VC)SopD@PttM9_!%^JN0ir>nt%q^UFnwBe^6%XTT+3YDSb?Ycreb%B%%D&Nya3+ z2w8xJsD7FRj?pAvgW`tTb`Y4^yWJDg1&-?3wn>%6BsC2_CNkshL&e|3s0g6 zCp}stZhun&7%~}K)l7`s*HIU=ZT@Ig^~ciyxVAo{|#log(TGcqhFz2n>YD}PfA{!SqL*%27i3L zVt~5xwo(|dpyWNbTT%Xq90l-OjX0{cQ19gm4a+43;MeNTZ=^*pQErF466HVSl3n+B>}KhjI4M{vNuAyFoXS1WABDQ=ro#C9LHsinW@c$u zat7*s0VfDf|5M;;M0)rQl0tU8yk)AY$&F5i9w5cuIvS^~N4`8Er&8j=LloSD zIB@a!n7j^ZL*-A|ES~z_uESM3XAG>{e-s_b5@Y`0H<8?2V(vtNLcG>P#L70QDc=)3S59YTUZanCyxMgJ9IkJd@Js*GAR@QbFvEkyRt*ihX00jFbI`A{T@Hi7a>$ z9dv>9Zj5Nb)QrZRk2L02K06WlI?fU!y<7-R6wIRSDQm0??g)lKHj%zN!@_9%(a0V@-q0Y8JIgQw0k zW7KL3JY)7Dk5n5?r)jU5j0mN7vF}HdGu<)aLXMCHNd@t)OBd>dOcSQhVqu3=2eTsJ zgNs889adQocnYQEJQ%-no23VQ4pIz4bPKzPwc4-DLBR#uam?%N00hJ1njr|mOjTE{ zuR*ca{PW6n35vM9iK!*t8#DOOToBZaHj4?8k)~387a3NBLhj#R<;uK?z!bpJAS{wMPPYv6QFvJ; z1pm(5kCd0#WeWoFpwEhy?MR{TpwFJvXUtWgmeSGOP~>%i;$uC8L4s7CRaGSMz)fV7 
zUH@X6>SJwD$y@wy2ft<@D9oe0{#fa=1O4+V;?Bu0XBj9@M&lTPmY1jKr%$u)t-%0H z3-xW%={G`|GW$M+@#1R2?cK`Es+e7a%3W&Y1={ajI{pp38a*BZf*cLMk@lcca%YXg zlb1((z53>tdl)5ewLO~{@W(aPGbV;*m_@yq z!qTY3JAN1dwSq6%J#P}Te0+5klVk5cW$!ppnl4pN5rBxnk}NjD;mr^O8WxI(tuyk`0_N-ZINriG=?|u0V*1~khV8VY1|dGfHsb!! z+(Ui-?Et=|dkl0Y1P6cph=LaS8TfA9T!yz?PpqW;y^36HLg)!o#r+qiEHMP~Vi977 z$7(}MP96Xy$AJ4j@)5S$ z2snd)MC1dM)y=FAI%aa~((I9!l;V~J2~%)Ps1pnWdtN_h)#4y1#Z|)Fy9R6MzFoTe zsG`5SF9Og>19#F$6A!2U5?$CmJUloKIWH2K!Pd!8Gl`-1B`tWbEj% zwiRkjD6ZDTM|sd?csJIOZSX&P3A_*kqq5%5i_x!yzuk!p2uJdXg!FMp@@_6aB7IoK zTfZ~n1_C0XsCgX-MJnqGCJnx&_GY%K+A@wwo}wu?zoJ5#%SCTshjddm*NlVOA60_o!t^8= zI0W__5IW`8Nk&UmI_i37>*#cFxlw+_lofMOq0LpPidbt%JRf+;51US0iZ2wkzhXBU z{sXo$ZRM!4y-fB)6GIa>mYK;(pHg%hKn`sr{vXS;Aw-_P)O1OwGV)Fmp4(3wz9Z;JL^LazLgBqs3c>31Ete zkvJ1G`mg2RFVoXBnbHFFXWG}DO5nA2ddz$^Q8rNcLw=sroH}ESu(vXg%7D4dr20c9 zVNbh2>kz^V5OkSK&mtMk#;7y~;;>bHPfBU~h1=K)Dez%9_oT_M9oq@hXPaCI-KAEa zu{h^qo^D~8_;yJU*(bQ2%Oy5pYPXS<8wW+^w*v_EnVFo=7Mxz0CO69%AvIkDua;ml zz0U!d&tone{&(zC2X!Ary4j(iv_c8}woL+hqX_34lAb%E5GR|RK3+PiU)tc&EO!lKt<)6Q?q{01?$TSpi z38`d+Wo9~JQFS7;L2m6=S4)!eGXEzn&)k-^*? 
zd1y`4oT}4%G%!z%}xCXHc>M$mhmTVAT336kckoBel%Bj z)&g8&jvAf@O!Xhv1y`%@vuHDzBU2eIKJHE-d^ihaG#+dinEZ??qTvKcSlIFl81&S% zoHEM=3Op{yn%GAlOe-^MQu7mA{UvC{^itXKzvVGn(In#i#7D#%-g`5-t%^txqr;ss zRa0U@3P+4G!CJk))@m4Yv!C;=t6-d2%gT=&k-LlU|HZLBjegiyu>*aHJ!<&T@twR$ z^k4HAr3$u8`D~&vUEwT~q%_-kU^k{QgYV^l6xU@aP~?)2R7Ni$;PRB>bq>wO4x z2Q47emNCk?Js?qGe-5jolGaEsMPNIPaN$dtXL$dp|N+K@#;;e$!}L;e9} z9|)HU8%z}N04-t!fy*cV-| z&}2yI^chFepYwSOh4h{7N6VIfD{fU8et0cv8q!pPWz}4dDhN9|6I4wEbU6S->l0aK z?`%!J%XqGI<%f9I^uH^v<41c29XWsR#SV7|oO?9xCy>;&NqxDJX*3)v0PF5mQe}Es z@{;McY=s=QsWN-j8l0i~VYxwu_RW_Ls(MO$M{F8D_^*6~WTdgNv!&mSpEEAgV7HKY zTz%Wg9D9(mFuZm&NL&x$k&5rqgW!Yx@a3u(zOIv;Ue;XgsP!R%QYvY);a(757zH9- zc4Ud;32BE97bj;-a`!?>KVi0llNL>XV{9ku{Qmt2^8w^JR*d2BdNFU}#jr1+?>tXidnE0BuK=S-> z=h>P=fbRnz5T;}T#2o|*n;igrz#sHq*Bq9%ys)H0F?pyPCv1_YM@pkxZGk0jT@WbQ z5KDokY=z2KTuDMU4aqZi^4=l86&mO^S~CWqFJ#i%2anIL^fydaUH znXJV@%IYSNofgsOQP}Cg&4d09K3VJd-5y#GZ}o0}XOvHnK&sdphlZ&~#{|6}+ePr)l?$_|NKwLRKN(BdZ3 zo#DJ@U=>sU752Y!1jPp&lbVL#t1ET51sA7t1e0$u;%X|Ct*=X&mew+NwOB)Prz=`#`&@WnIu3xwe)a~C4 zL3v7x3@n3V8V#$U@_G!`_`vmnCMluP{oO7rK%lLl3x8yU+u<%d=vI7RcD(rIYmub< zT~sKdn`Pe^#RKp{qrZlIH+Iz?rGH+&5V9Psbt{^s~I1Ml@4D2Us9a; zf4SJtwo@OBo~(qNojBF^%Gy!d?!UHHei#89mXzm%#QE2`WDj{{{~$+0LOqi*%6P%0 z%3*@i?u*OGyVk3B*A@ywsLuGBl2XYGDBy!kJtwQF*UaS`^K4pW=iof1FET}khs3Pk z`NJ&y!b>98;h~${_Too$)x{x$R6!8lWcpKg1iM0@TPL@5L~j{1C5nuVnU4R5xHDw3 zqy^a<2LKeQ&$;g-_YXS^u5A2l7-&=BGi7NvGn(RPbh&U4IM@v9x)hMm*~+kBFCBdP zu4W6LX$?j_MX-4Jo@9aOZxENUak7i;55J?NPMBy`KM7T5ki?o8-nY?+u$qaWER8=g zX0`0P5AGVR99*~Hw`{`*p!!-^knJK}Mz1=QZU%3}(R)yvgcrj?|fbhq#uk$67 zMp4}MhtDq#SrBar_6ynA{zL$l`8iMX#AmJRP2+R3}^5MRaqpmbj8GW4!Z$hLkza1`zr z@k1u&zx9zVlB`!`#B2Lg5tCAMDrTA+UfcW6Nk5kMr}E;uAB)ID3+Z}V$xKiXWLCGu zb&@@Pb=!WfDCLy2e{fUTg0SW%7c@zmHGmJkn5=1dILIl&6ZLKPV0MRz{m^T^tnU0UCMJ`aMmWMX6AQLqmL;?q?P zsbsx@f@LdX-&7D>Q*qjpw6tK(m1T$qYAVZXr#d;VCrG*3N1uYBJ$*>h8d-xGYpn=o zUXj?>QLCMN@Z(K7T^8!Pfq%bg=|gHJDV*VtQ|Rre}=?E(~;cSh>N0a!&!`UV$bA_ zrNERQ=kmQr#)YKfW1eZN?^ZaROvEf+Yg$8b;+I~$(Pc$u*9{X-G#3IEkEt*`$QSVIog6J# 
zA`y-Qp5M6VpbaKYFu}LMRK3jUvBOu0mF2z1`>m?1rp5!TB?KT<)b`${2^}{Z=Kap0 z{@V3UP2Cu&xngy8UO?MRAL3Ui;OO2=NV3gbgfYwkP86@NxCxSNd?D*Z;Zxl1p2TPq zrfV*YYx>zPG-*J6HTk{i<}%v5b&p^5)+`-ncA=7+ncNZE0?ZkE3V~-}!vX1E{LVMpgh3KmU##d}~-$~?0L z!|)PA9W6o#giPgsU|Bd3WY?@A&mz2kBdC8gH59E4D;y?C1g*@8X)44>)LvUB+KSRrZn=Pa@>glXfFN%iKv9F#NG)hABKjwmrQf`7$ zE^WH##}=w5_T5xu{lMbWSxb-&^K6pkh!Q&d0xdri^MFOgdH#*LE+|n)iWM|pweW{VTV9CFXr9w? zT@lQL5&`5YX#i=(c#8(v!80ed^u*m4}!_GKMeCmXy@wwvgds+K#6l{NU|Do5{(O1B!Z{bv(e>!|OAEauS zFeCzQ!T5<^)IA>Yesp68z2Lp{xE_t0@12s0l`&0uW2#aSd@}jt+iIPR$@|wAI{##s zO~&Eqz$0ku7AcgPbRy%=czUPh9_h?#Y7j1-_uwi+$vayFT~X+LPFx#MV3UgN7xq*W zdRE@0<>|@hX2qG>alJKa2Lf$fQ{-%T4DfS`J5Uf9P!LYt8I`KK-+Y^67+c?upqH?A zbu+jCX>IsTy&Mr$c#Z{Qw{IN)7_C$@ll$C^JjFaM4UaBV3d+sjB%0sMUs6dF*N}-xms`V{CaT%m*h#p@O z>BQbq6`f=qyyS0ry8-B=tf6jBpPis4XrLe+l{eb)ECZnKA49`I8v$CsCnT;z#CU*a z3rJ6pN9ZOU#7HD0wcJsit~-$nq-<+5xq1!z^C_`6szx(sQ!bfJfwoLDM^!hV!6YSJ z+0L#W|7eCMNd}#2)Rrn)R4P|t<_mHSDlSf8mDcyxcR%pilbomaJVaG_erwu*dH6n; zqfkc$7&t{y139)h%fUV|pyCnKR07)+)&mzNl~E!yFB_feQ(|~4lV8CVewB`IK~pJV z&M*5ev^{b(giYFsq`_n9ZtN>{C@9!j#P?p^RxU&>uHm3yb=kO%=F>&qmOf-m(WdU_ z|GyTDdlZ_dFE9Y<2rhwQ#LPA(L4NcFlH`}C(gvI9b*L6E0yhqi4ydqdDEI}QbYJ#w z6s3BOr4oJ1EEBU=s*~`r&>xDG?ao@fK z-5cUhSAgf=s%@m1wL)&1?g>1;v`GxC45skT;j)yN7-vDMotdI z3OSDKnsivlGMbhGKdZ2B)r5|NC4od58dXW%bW&>Fm^=Eey|!iZb?s;alW-ume{ME6 z^-@gBV6DY|joezuIF0uoWhvV7FGr*jd;7XXF#8r@)E{3E0EdqiKw}A+tfszOT1xAM zI@Yp=1WjEk8mu1Q_};EU1QG6i8p@7^)KpTH<|>_KzF@VKS?)}5?*^>Muh{Dbomv}C zZ)MM%Wl3xss_PQ69Hptk8=e64H@5$<)w6K{ka$v-q*jkReP%Hpze^vX@;;S^oiF#p zP^ZC<|BZbn$a_rk_ND!%!^nzsbP&HxMfr4&>`&zRfbmN4n7}mH0brX_P`(N#XNl#< zmlf3~Eab19m+!$p{M;v`C0hYbGa_hx+LXnSpxzr-XRM%bQN=*EL!~-s>=JoHgqoiD zmVUtXU2Q0#koE<;u(ea_d7+7=)KNo`nZe3H+js%Zapby%dzMdg8Q?dPc>0LC=XW%$ zA&94IY=F+HD-W#y=xdOp2alN6y9Fl0=p-sQ1-ZEslOzb)HC zFhk+y8%GUGuIY{$8=Ly=tk*N+t09D{jR&g)Q+MN9*#U%VFjBCoYKH{i_rn4lrfa>o z|Ip`>IH&N+O+v3&tywmNYXlqo#0uK=MYXTRWm&c7fih5AWF1K^{7`h}&tQ%WMSXlH zROqnOkl9@Ep_(hq0c+Lm%78cqD5!7Hhd0}Sm(MfNEQPfILeGVu3nP>A1{j(9C!*9% 
ze%Y-f92R*nz*5!ps^FtUL*f%R2QFQZ?qg>85EhKo2PkKZ?fG5MUQ(OS#3l1T7ru+F zj{*hHy1JjQSmy((?D|kgxB4pGy3VpoV$y(Rb%Ou@QQXk+LK+jk1>2b~=1%HZh4Dy`vziB=x^Yls~C#>020lv-;?LpQ~-2kH;EQQ~}+TdG)vi3@3};f$5i3CQ3^ zYuR*OoV=rykE7K;8F2*>kUmk|ppqG+Wg5r&D9;dTq!bzT=#>%e^-IZIqXezVLBrT& z@UWkNe@2~93z#=99oN6=eT_z!x91M{2FA`8&61U;EHu_+{`Z+zQ}A4Ix8FtM{{Ptf z%BU*4w@*+36#)eWk$R*XrKLqWr8}j&J5&UuyG!Xt>KwYeI}aeufkSuCMxXyXGi%M4 zS!>pOdOykWu6^(O>iAtNOJpgMtw<0u=ihwTrl^KTyoGbW!|`F5VD^;|{;*Ck`6BwK z;R!>C7GoQZuIm}L!o>aW6XTd5)NV}ssjS7%Bne6|c$O3=(!|DcO2obc5h<%vtQa7IKA^Y(eaz^nI_J}jXD6Qbc0+zw*m zGAIlpF_r2+duF^JU?lZXDB#CXv2-iSNV9zV=2n^iF}4MD^%w0|x+=}D5%*+(Z+p)n zGcHG)kIj}gk@-va5Iz_UmCi7B(sM-TG9gZ}QMBu+aG7*L>S^TK`ae}ldtf4`t3`*4 zS+Go=c!Y$kP>Ok=f!pk;I~OzWHnjn_M&IKy?9^)CuV?9YyHgdXu4(;7Bd5 zQBNYajdS@nDLd2>L`LZ_uqL%P^s?e#6x`!(UOu7E#8ZB2dT(B!9;#i)q>$wuuwA^h z1As!TH~iTQ%?dE+i+}q5Ts+rXiQ4Zbt;Os7rw1K@bJs%jRGxR}QP$xyB(hl|UGzI{ z_&}Bl{<|`5m=#psfJY=E?{IQ)LLo3%Td_LJuKal7>!>LA_aF(-0WAGk`b#2n8oQuR zBXSrK%_V)B-RXe|Lo6jl_-`$PR(VcOtlCKd8NuQV~m%VsU#5A;sxAif^%f2W!v zV6na%<#KXl>0(A?!t>d|Xs6GdrDS?=5%hQbgnWqO&}rE3oN3R2{281Vn#d2EoVz@B zFNsQTDcvkO^}5C)G@p3%M-UpQ=)qV!vgOej0_~u zxVm?()qPlQu+IR^jSYtx)EOOxcHyV4N>Mx8W1m86nCC2Aq}jL3u;Zzt0>tq%$*_Zg z&GV8S1T?JU?YpbxzgXO#7f|@|2zNjV06!N&KF*F8sq|(Fg7m&tlTDpz=v;hi6_F}?!{@{|?Ly{}xL_P%Q^5Mf!3Uv<6(a-(z0BoMwi+9SaqTkg#>?mqAtcx z7Vh2pH*2+T)_C~?zp_=^DTZ1|e#lm#W1_Vlgs`z7dTFc5)y!=)yBXI-q93sE$jN)W zci(K*?77VK`%s(xh#R+Q~3K z_SwGZ*lrDT=#Mw+#TV5Lh&{A|&l%X$hAv(%Jbc;)oh`WA`CHg`HO0zn^yJ?xXia%> zY$BfiLyFS#=9dCN5Pa)_=e%*kN9L;KaGTbp9fi%{(1NmOTlM$WOpd2na~su$2FzP8YrqpiD@lmitMf1)uah)UIlDowLgx;4CIVWA`=~L--eODx>>w0 zq42Eoza~BAJ$%bJ8Q@=ev~=X5hW6KsUuq+grCk-ylG{ChyStG|2W^?vp5IkS1!|R| zJSPJ+XDyG$!`L6Bm17Q=bH6bt)CN0vhdsU=$w}W%*ORs^itINANY8Cb2CVGrJspQ` zb)d7%O^4T_1pw(B^m`ENeE5N!-7XZc0m)L83yNq5Ii!L#^uAxITrXC#pbdEI`eu*v z#E0BJaTx@Uo~e9t8hIOS_`46)_Yv|b{mzas8ou{kUhRy)ro0!yLl7r4i6TRolRV}n zz-b$y`%$$Iokcs&O|=MfK(P&vM=x10xL%c2mnubaFlTN1%ctRr)FX*W-I!^U`wo+i zI-^egAkap=9LUdqa}}h(l>NB8Yf;Z7cl&ARwr@Ayo=ud*FQ^{V<~}t`@2c&7K7)kz 
zyBVdYim}v8y6~A}!9RB7>w@1h#(aCtmq=hdK;2j1FUGnr_YR@HWSDx=ZKq)<6Hr6Q_OlXKN8P8$@+TzJM)aIEAUWv3 zRqdt7&kapo0e$O~MVW5fCL9lD+K$`%mK__~j;r%g3SKioa1-)p~6CIl7WCx&<1X52k`&E#vUN_LjxZ=#tYs}e7C}f@Xbwd?wN6I)TQcH2O z@5phbWfo`MPTKAqrfOkfq9=v|)5=zU=+cfCgud1f%5fmbfuHk`W((P-W)v1iwI)-# zTTw^evY{)a)4mqLo2YoA7YM3Gxm#068=i-tQ=<$RvO;o68E$ctQBJ1Sa@yiRVIdk} zL=b9xV0Un+?$XP$2Q1o(0S4>|1Npxj?(l%Ge|wek#Dct)dyLE%#oYoGJE@PoZ|C<; z@)J&;GVmBE7WbN<@i=`{Eg{7Dbq{hzio)Y-6WX=!z)WCDZV)D?Ctnk;_MI}L>ZwtX zq3*g$rM9E=EZfxURP~agWyVx(C)$<#uvSu-H&`7L~=IWbY`erWU!GmxK~32z&7iUb+4*)M{62<(fbyUL}X z;gLm}Me|4C>eTss;;XQP>xoXUeV5lBizj>0%{g1R)I0IYWtBK63}X;0EhH7hLQ8V% z&Om<@Nl(RSGmZ4NM3d2HhT)ech{7#I(Uv79d#if5Ql5nb4U;ciMlm(CS+y)@o4N&_ z{#9|!`p$5O@O?)9JeGu3iqbtzYq7Wpi&>&;f(%-8*3}2kD_Px)daZ;a znk{{2M~%;IcIhlz@B$u?f|ir$Ee}Uwu6A6X!*;bG+>FQSp%Jg5dz~>OjdfER!Hgc2 zT^048Zs#3gx&VRG(F35LS%gfHvX}iqLC+*XDfZHS&(dK__!}bD{u5%5pkn z7n#LZcQwzs7b~;B)y6MFzNeECGlF>$ce|L_o+43@7eQsrt6(qxD|?McH8|!+ zi~&PUPFv{vaG(@l1+Ui{n-B=zCyWgUsRQv~->GuKGC1xZjYvO^bI=im)K{aT(C@qA z#}k2~RC=rwBn4zh)Cy?h$VQQ>9B05SnMGgDWEh*k-}&|hnc&GufLcy76!=D+pO()y zOV6e(>{dC4K*$4dzk9CM>Y`JxWx|WBFFz^D&<{W;$)#;>9HC)^Y0^bktoQ4W>w!j6(8#7d2(>HFoYbWxPa;=9VaWbohWgh0wIqJUyA;R;LdJ;Q%B>TbjyysI8lR36tBt z*F(=XO&(Q%$)4OFQXseJpCeeXN$>+qW61gL^>!B8eBL!fr#{c7gZUD!vgLgBYtI!S zXjja|Ll6cT2_qA}pijQTowea`BG`{%3k?X@5@b$NY`xD?3ST+0FjMxUZ$JJg8^G?S zw~Ia13HUvWu(o;x88d}GgT)xtGEhbJ3XN_Og2@`3`$~T3kNiRX{E+Q^ne~<{-`lqr z{HS=iS}K7}2@P4>3@Yq8rqv9HtLpvr)HJtwVkF;*rWtefVj9t?7M#iwaZ`?h@=sv4 zwfFU}Ei5Trm~;xVn}N$)fwy;pv`aaXfTUMiW{s*NVx5xmAPT3tJHUh9NSUd%+&HY# zxTMlL&3Kp3e3wt5wzgX|WBPF24sXDiDOohs$f4-v{q{2Yiuo^+g*TFgl8lZVV-vqJ z7Tfl^6QX?fo4Z#GSaGz9l`X#EdP{n1-QLt(U$$Iw`J@aC(U!xf4@(c%m)9e7zU!zC z4}7VdAlTeSKR)(VGCPJQzMyDAKe6#Rvp^scd|8b3jk6U-jeLDjbz0~5vRKWi&9lSw=8yHd5Ypk-r=N=*>&*L`*@5vnFxto1Bx7H98)pfdGR2n=eWjXGX?eq@pEG%q4pLag@G(l6N7amC4vea^al|i&J zo8DR}R@#f7i!z1mpj9l$6W7y3u_#7*Ctk;1O@MHwe38G#PD zXK4WD6J!+7$M8do`F=p4;H%MORtoN>AL4I6m)cIUrudR*Z*#v^Lk%)SC<6O8lf z=qF5psNO-g+DoF4qNl#1s1Lt+F2)K-O6F$0n}TiVFnd0FZQuw7DND&}`x&?2VW+be 
zzom_~X4GoV_&^Em=ntJ`SqcO3YRfQCKr@#(V3pLi*Rls#8-&yhpP@}JOnGZ{I=Vbv zd}nWmSOJEUkv$!{Z0u}J-TA?XZU4QlmL)iRbc%RTHQM_$e?g0-YfP9o(q!~+csQI$ zK)aoBALEJpAlRWN8Ja5%5zs;@9Z@%L=!8y9IRmRQ-hL{9+*0rKv)e7a!eJVPt$%h8 zvxlwXPV%n=toc+k6kgGB)4uzZ16)oi(Els1D|9?|dNg+I;Kvyr2u66}yDMNz{W9!-8T&0< z9`tLV5LKyQC`jb%NvOiU<7S9Zx%z-+2|nS_vTw@MU-zVdrvN5Yxqn*2m`yO0H5hc< zo?Mjk8+8TMg;C2?Dz5B1Aqd_vuUx41yZq#^ROedQSyiDr%6|oXUUOqQldf`eBe+=* z1TPO#@lWWV%VIh;asl>;g0>-AZY#M92GUD^P`#CM{+3l=v?B??h9y~ zMbgEK3L|ktg{6D<(H}cSKkutKzK<>;y{_P=omYFkncFbMmzW3essXsRB-@|bErFiYvPPVZ!)vc1PQ;Jo_0&@kl0D?z9*FXtQcPj ztMzyy*Xeb2Z>yFNa}rRlp@L4rW1|zNHFNrboj@s2ULkLv-tte{ciH$CTWz48mk9vt z>3;gh*>45~RB=G?or>l4@9C)bya_rZli4?X!4%^{8G0Xra}r?vb}LqHx4`-lEfi1u z*B0crsH33Mi*5^f(#Zkxv0M=zRWJ)NKuSM`p!~TuZ)JF-ZpEN_Mx$H@R^oUJwq&PF zXqpF@7wo>n&Vy0BRkahDEeT^h_1*B*3BF1nqd!9mt0btk=9%&sqL0g78^dK&I$Un0 z)}&%VO>sHP=(L831;_M%{%hVcQo`WDr-<*=OcL+ER{NuA&u}OEo}J0LFz=b4z>`&#jB*MLq2J&h!&9@o{VO zwYu({G*vbgPE=Qxu5zJ}!VmFiJOnOx$?15~i*MoiUoSoRKq;xb{iFVkFColaGzrqN z@>(D)dGes>A7c6{*LM4&*F#VDg(nJR*}x2?IR?4DvV@+1ON zfuGxXg4k8DO-p573F@$PwK^6%qc6$Ol*>RS%d^KeDH`{ncFrpoa#ww_LfVm-dbo)! 
zN}KX_*Qg-eJhvCZzLrP|Y|~@X&Xq*6>Jb)Mo#-kBQwo)OzFd&Ne^R?l_YJ8F!jZ!` z7u8U~7G8(S~@urM;F z7b4B;``hMIlP^ua4Uc16d>O9n8Jv5w0y1}`4c~8jHO&SJHBd24L8k6Hn4Rr{AV|=S3HYCloaak< z`wC}VdCjdWA7_6SXq0pqgE?Y@A$+F?N4>(LU#-ufDpwli9}@v=&6tBABSl$mx6eSm zYym_5K>|URD$7U9KPr9aJq8;WH-ac_UusZI!9EqfaS+c$7YR^V5$QyFWeg$jR{B*H z4a?hwrRGJqS|j>0NanjXQn4K*Pu6f{_|1i_xjrH?!!ws9Lj9w`_=A z@pXIADP9D)JMFL(*+HgIoweJ3Hw*{pgB4)VKkK zdwNC9X6lE|b^zGsSGab(>>#KT*`tn^kqRQ~OSE#1W7Bc^u#Qo{gLZI!WnNyALdg9t z=FQ>IVr*mnYCcH#iPx>m$foh}*%2;;9_(sg*SPIRPiq)yx{(?5Y%xorkii72G zv$3bKYY4;r{q~+Yw0drlXJiJaPo;(TrJ7Pe-(pJ?vLR0#;$v0IykGro{+7<-2}dv8m)YC4 zsesa{czQQjDu9Ldmh99J%9}1_5ulTe#mTnV;5*2{f=w9Wn*A+_xGPUfk`r4GB;`aEQkpd)ZSj8EYN`#wd6z05IlD;7Z|)jhM^WA ztus>Vv$o>r%7U#>)(htR(8rRRcRmV^{mk*()>Zd;3{J*--*OC~DdMH*YW91nUu$@P zY3I@%DnXG!TGKa7Q{{)wyDpS`Z@6vP-JITVZ3N>4f7*HIjIf4zi!W0YT*=5h%tP6G zevw9YYww^pMsHrTRb!24C}pXeA&L8W{u3Av1j!`P!q8dIANx%jT=QRzea8yLL-H7O zg)YnEQE+IX6Mv1Rr)9RV=|VQvMQ)BwUXCSh{`?g`#N!jE`E{jFp(jq8Z$-5dcG%X>nL1+YPd`8n>(p}-c@!<}9T(=L#1zT=fIv`13~G>80;F0BH6%20Ep=KO z0GZ3ZQBrTNe&fA}fKA)muLqLW{dQM!iR-v7NV5DEzKtTAdi(B*e^7KV$q>Wpkf7E| zb50UPwrE`>jhn@}gT7YNGlI_}pRK~_pY0h14X1m5V~>LQq1Za8oiPYIDa-f;sd#Y zcDUVzqhptwmjsumY>2I*T{fjxgzSjoa(m+-%2-VIR*7s=SYwXYpqp_z#WxF#s#Rd< zcmwlq{S(??Ak?uDAm$*K*I~PSOeW-Zb-SpbcjKMsE~&Ebf96|>O94G0T`GR?Co%9X zoT16tY0BM7k%kE`yzlA7YUZW8;uPL99k*HO?e?$6l$-oT9@^m_*(*^F_^g*M=v=>eI2o^n9%Pr5?lmlmp>E{s5Nj~x!};_dDqpH0koFDG0kXL zOWPnD#(!R|Bc>!zdfifZ0}bhnRv_su>9P?TJUn@xx&A&>MiT@u~uqLW{da5j3+G9YU>3JeCn1OS>p0UCopmL8 z3)Va5{Yq;o;M3uCTO0t}RY&%wMoh~Sh?-)n+8XMApiyATWal=`dP8w(gb=MsFVnoT zyPj>(f0(eoiiNac<1>?3RvTWUwe8gK{6LVn$3CVkXcye|KCU}O{9@BW9FhXOr@k92 z$DPX>kV3QT=cdV|v-k;`e6-VCJzeysOfh3f5$LtUOm+$KsZ4Lu_Fgr*(a(bkX&MW& z3X`J>3-`@I8^j(6nA*G)9+5S!viDxTQ!GibBAY}ZA^OYq_C2zqW>#B`MNA`9hJs>6 zU#L0`aR$>~az_kgNyiXVAFZ8m=*&88qt1<*S&_>P2MZ-82E|DJjZ|l5+vKpI>~DZ=Kxi@a-b-h5%ME5J4XTS`&6 zZoq&RFO}Z-dwWjt-9z>F7N3>6E$oEZazGU>9TTV+`7({1d45!fbtSnpsc-`1EC1JqGzR>|7byEk!PP2vt36DJ<{bj?GRJu-Ds4qfdx1-m^^NoE`-XN2CT6~CW{)68e>}wpg-DpXx=y;3)#Prr 
zT?F!FlC3wq&qTT@3`8Rb*LA=^E4-!hi~CT z-&zk1$K0(dGS9I03{T=eGr=1MEJS;SNgMh)qtDWPFfIo|U5w&fjHgyMTYI*0Nyn<)KQ&tm=LitCT53i%K7fgfu<3Wf@sP2)f1t* zMJYz^w2-9yd&E#<*)YPk4EL-j=I2 zp{YK3I)Bny-&{u7csL1VgBG)wR{T;j>y`KvU}i=5tm*Iwk>8Vs|k+7eXO0ndvY&uPPR?yvQV4#3s%v-inRcYoC_suE5G3pt*+;hn$H zUP&!JAzC@W8O-vFiXzLSiHW3@U7<~Gdgub%`9&4qzrIwxBv2PSJ4#?u0{uE{apj@^ zwyKYp7pg^U6s;-fMC;QXaLcvNuN{V!VA$VW)3C7H&`%$o-Qa4SnWgNZG4^B#^g0ut zjn39cPK=@ctIinZ5ArI+us~YqRc}Z!Az|An>^FQ%xd;7#SBo)ivT$l~WqmCManNy& zX!1q)K2z9gBHGiqbT7K^UU)55pY62%CMtnMS~}=~&pi<2&`+t-D*n-#X1^L0nkQw! zb=}{k;epXO=~*xa0J<2L;R#e!Vf_5JeritDJ6o3mvOmV@qkm+B$RL*Y(Z+oG&ktt0 z!_{P!Yjgjmtqh!X+v1vsVJO?@%x~+zt_O8)!%dXRBz58{{hr&O1_%#~T7aO2s(yX8a?l*)v6m#lqT zDX6HNHn|CZ(<7;KDvZ5H5jTh#YJi3sGuS)bd?jf66en(W8*X(PcwqNqP^(eFCnh*6 zTPHBZ-E|Qrpidq*m@tD~HB2F8`%H3BJbFCsI-{NhaRA*g6YSdgN)|x-^{*HH5P+?C zXp^t?t{mAd&k{X0TNMs_H#56kT>DZ#d#!^qWye=gyiIiR@haS)Jc=Ys#TFSR^5OQGeh)Gwp3p0MdYBY7OnJZB0jKGQeSC zNcN<0+8LknO^1iTe#OM*nFr4bb`@uxjKvZm|JCkK%VZ7$6i>!k;5rTAu5d?%tWw6g zt=b*h-Jd>Ijf09>^zqdp15Zd-73lirKx>XCbE{klcSS4ZxEBN8*+EP7Xz5`_o~eRT z)AET}A0FWCGV}k10K~FZJ_Q_g$1yj0=ygBu&-E{Ra{O+|K_d|j^yd7TjDFJYZ+ZGBG0$k9r!7sDI7{D8-G?mk-p+JcU(&G z!QapOtm(dwXu}N}8*Y{FzXUM-rn)=fsJwB2=TzUyXh3n%mz(fN+kMD+E(Qn=vw@_b zXUSDXb-Ch|af_yA;SXyiT;Uchm29$HX|4?HE?iDGljz24%o1`JV+~l9myD4}yx+nd z3^ zuvtE%$N_pOfkL z=U^?Ts`-NT6!z?2f>=qXit4W0OMHwt*u>A-_zk#3%QUpP9B zBT#hpp_x_2jrPJ%Ivy?Vj&@(IL-Bd{tf1qKqMf7lFrp{%Jwb`WtE+t|Ig?=_Ia$M_v!=(6YVI{W z?lmyvMz!}3U(ZU12zQTf2GZc!o@_f~#$m^Qs6{*?l}_b&u{r5$SpyXz%DuVOtz1u%iCx0XpHy*s>u=Yz`Y6ztlGP zP#8gf893Kf%1AwWn}P%>vHCu zf@Snh=Wv6Gv{AYLHTxA6XNW|G2x z!x&&kMEPoT@6`rN#ph?aBoag)jEutJ!t;w(!SOHfcwJSjB!YlIEXNbE`;bA0>S0?w zmkKe;k~(&RCoiGD&g>b>y(^pHzu03^`gwVRM(iSMDcq&>pS!aOSh?_U^TZM)bYX_9 z`gI(lzb)6N*|GVE!V2F$a&T6yCrUlRE!W2jPl_MF2r(QCGZ@6m2$wA;Z}@KiG||L5 z%-EXa@g2MvZ5HJiZdOs%&h-UJylPb|zsK({o#+u7W(qbx|D=>b9xu$p;Wal;s)DK1 zi;ir~>SVR`rtMQ8_t*}^^4_Er)l$#wv?)5-up0B+2|^fO+AEt1Xy?qV<@T1X=w{zz z!G|K`@y($20XwMgiMTG{06`lW;-NzRlTDCNpm0 
zYznetu>CM{(X4iP63P%pvt??2qFrEsXCB6xzDvohwz_BMMV@mMw+LGa&U5})TF}quF=FDk_9~}1H!*++63B)oqR6uKBMi^jtx;&0q5a!%L z)9^DTb;1vsL&x<&$PVTpN%3d5SJEldB#gCP80E0I$Lq3$t1l%fxT~ZboJi5zGZUeG|2~}-vVCAX*hvN3qS~h zMehJS4r3iR-s>y6={U6H#IM{Nr`onn?#G4`FVHx@ib%H?`4M6CT8L&(tUjK*zC9s^ zwL9Uwu6>!$@Z$YnKjs^P`2g;4vWiSmTX*Efw`#Mx=T;xLd#G(+eVQ)`dwpR`U1scG zw(e)=^Qjr@s>FmuLGt0WG$?y~_#a_58QE>5?L~HYMVAn#ql2w9xm=2gi0BT6MQ|yI zgEfP3OaJw>a0~Xs9(?euGxeL>h57pS4#)LVWd6DhtC?7aX_j;;joJpwIz}gf5`+;> z#v?nL4Iu}1VYv+PFA(Z(l)#gp+mdqM$bJZa{2}YQfjOR&ju{}8v_6cVtk+#RUx zmRN|<8#@_jD9!>gkYu-1!;2iXH^TJ)AW=cFD%=0_=v)A4&~UBK=7x*KzTxWD`<96@ zli-t<++b7ad?)edwFZ{6HJd224P7Ke6VDVK38^B%b87=}>u!J2pT-!Vm7eR~$y?8V z_`9Z)I2dn48VUM2G>0K(#3V10vBUt*Bdqq1B{I_I-u_AB1y?5c_CW{t@nBqE1gzfD ze0LeE^VaQRSDFJER#(hs3AZY~kAy@&IX8Z}cb~xfP{r!fd1034;B=DrxTtuRo#V7G zjn95x7Axhl{`TbD`-%yV^44PK+RUCCsZ@zrT#+WE;bNsttbk0i&TFH)(9t3QK6?)d zNyT_)V}E)wO!J~!<5-qYl7r1*!PR|ccJ+n`PWd^hz4F8oPJJdnfu!98X-05cRc5OB&^lXja+EC#W7c^H>wi%$U2Lz zfGaZBsW6t2p|r&a2}u_N4sUdBExCckdLM^Duadl9F;zUS>PtI6TDm>oufDzF=f9jA z@xAtDc0O{6KFUF>@+~x*i6rP!>Rm{)AZS)g@z^hr*Z}WrE^!Je+VbAd>%U!sT3{Z%lE!-mbJ#Mc^u55O4I@4XN(QPDEuWK0M`aec5DA4mo z$*M35&fy{omtLyG4rY@Rd1iWTd^X4$DG^)I$k@xZ<;yjFBoCC78yy1+T7-n_86kmYk+H5-72Z}ir-B<=&(2iZeqiNL;rD)B-+blaxpsISMKVzDcrX(p0r{mq0s9yb;o}a5Mf_L1wG4rdzcyi#FUt{Vlsj=)l?Y4FH=DHDf zP;%Ryy+Eve8zg(|wY;U}3^|T$WaW0Qb28ne!t1%c)P$e%U#2WvUOAt7?(5wCZn?c^ zEVr&>xgDN9GD6~jZHAIx>~%KYQmv<+abt;!YI~hWiF#iL6n8IqyPcOe8{baru2Ftr zk9>%PRF-Gno4w<{v*T%_I|pqjy;)EDetXP!AmDskKL=fy7@yO+UGiY%U#K&@zVba+ zFkTBKPP^`Hjl*nkg8x23M4YbipHT-|ms@E~W{31AA!`;$g^-(tQm9YFQSjG6Iin?2 z%38!ok&sj~HjmF0NCs78+0aP(mG}$257cVR^NOVjYMtk2N7Jsh<`cFWwhEY%krK-| z?mJkPacaxZtujhUMZfz)LTco^nxWoroJr3)yz3w%;pxR8TeZ8rr-(iZHaB0UrnsK} z(D`plC4O()8zIZ$h(-^!voco&S#RvxOkN$xeCiHTm+H(&VidL3Amg3Xg}sX0TXnfR zlYFtaGcA)lR-z>?MH~_NjcK2M5gj(e90RG4y-K$Hvjz%^*3fxtUnY{iG_}_r(-o!b zUv5Gcu2+j^ttB~-p^?EMHJD*0AQAx&!@c%%qqMl{<;rs$aM?NQ-0&|r z^yG-|#-`>TOoEvs(quYV2xGbcO!o$ok1^^S(=JtMFYI!>*s-4A7L=b%9A{sC*66Ox 
zW|-@DL_$J}h0j!!o-U$I+_pp|-3*r#q+PPfq1(jt0Sp>z@JdL(?s)=kM?&I)qbhbY zsEo$oI^O;M%tof*sgWPG(8yy3o`h7DP;`+jB)4`^su^%c&`3>>na817dn>v%55O;* zAk{hAYTt;`T*c(VtOD>qNF4RQ$pRvWKg2k=Qsl1y34~D5uTSj#CsNe0LX)^6~hn zT=`cFp75@pEvn27)RKMTcgrvQhs+-PZZ)uUZe}|)=6`VEXYMy5$dAzdJCNd7sGqZC3$#y8`^$&>> zX274XAfxfY6wHQgOk7}rA^PRHOC4YzKlQ+8#C-z5)t@nYy<%Y5naWm{vZZHI>g3Qe z>k5bTdXt?40?j11`ipsUI5Rj;AW0fJXTJ`)9Epjk9Eqt6hm27MEw93+gbKb&7P|dV zO`fTbhiJmtCw09VE}GH)y=XpY9lCHkUfTUiLPL3@BC?H6q4pHlKQT)qQbTx>2tw|u zftiT>3Ou0d>ntkj1*%m({tw9**xttKvX9+|R-f^M8zU{)=1NeEviRM%`i$A*vJjiu z+cOg2_t=t1H9u;(-OfHWy}2|XqVfGy`d@BaI z{-KzM;&=KC>1kvI3i#(A@;_$@h~4oV(&z9yMnXb*E&hk71tTGMzrK>RQ)@v5_Dg`ufZviPSX%1&>B?v&`<+Pgu47RqDZjZR`I_<_;2tLBUS2mlH#ZK3hD8pBMcE7? zE{0~O^GhGg!Gvj6^}u3o3-OWINo~ovJ7G6tQL~=Py<5wqr8Yeys}YI+g8;c#tgeXb zUFwko4WGSlKzfNpy*97Qo4+@=pKTIYXcDL?D^sp1^Vtl{k`}7^?@>F3bN>xf-KNc6W!Fa|*OeI{8D1d27rki`TN*e*RIUS}^Wt z>*C43`W0|&crRQ2;N$}5fnJSZtY*Hmv*>YZ@rpOi^jnSH&?Ez`Nsk&Cqqc2qsEq7n z9W}3cU6SF1Ca)LM)`4HFv`n%^;A|FMpj!&tG!93%W<9r6V%3+f#Et-k-DAJlx8=uG z;>9QCP1%malZ{T+e>qcmG*+aJxzgR*Hdn1C3s^hClLQcP$w;BT}X=w$Mm+Z%xTLvOmRww&?h!p7Y38yLZ8p60diT$X}+62y(V7n-P9fWSb zuNGAtMPY1Y1hqh@?Y4Et4>rUHmAvAxK4SaF-e`R*&4b!1nD?5w#xnY)1J3l`h3sIPwc+dzEWS7j zpCpA>hxfXjg9Mfc7U}J{vYc{iRlRkB0q2_D+u4_$JU)TN%|?PV*9Qh0T#pb?;_6x| zxR(%w@ZAY~Erj>_l+(5>%k2Wzw;o5_a2x8t`|VE7WmL9^*`5iRvdYn)h6SkKkrTb@ zC{e<}2X`uYajZXf%>awV6L8@F&K42Oc64^kl584>&(<+&kxEXSUNrR=A8%F2h*)Ya zL@^?(bWS35g%-Qj6W?;W9c>hA)g~r^ryx}+7dZ&e2>K~vJrBAp*cbG=GyWQ?OYyo`5ss3_VGD*ZV_mbtXwQTA6Jy zd#YnjpXy=ivEqzLKi5xNKz!y^ARGx%H3^Q-h8J#r*$?pTP@Q1iFOJy1Ki*-d!D8z} zu`XPAJvPKjY+b+6y*{us z4ptt$GOq2iidT{HUNXtFdy@^SK&SQgV*;W;ra`rP7vG99sA=_2eL5c|o@(-t1)X9{%$!Bf5wnAB<&)?;)41Iew<|Ie(j}@j>7L}M2>34Yp7#VrO%BV9;4+se zC*-d>V?i1`S5fWcR+T1?QslWOHougZmSvWeD5_m)mJlXd-A=>|o{Em=1!5f%&^0(| z)={ecFlCkmi#Rr5=-FmuEfI(v0*~W;Be!E+Ut*dVDye-ak;j?f!D0SDZ;<^^LV8pW zNIV_Hl>lG9Qk2mMEB?sC_8C6sNTYm0GtC}y6;_`h@2RC4v)A(F4 zPW?Se;W38>;0=uSn}ZFL!x9Y#?Zd&wNyU#L1Qh%gP}dQu;N!TUB1yM0-5Q6D+5Qe1 
z%yrtV6VBi#-%DO*@MgdtJ}mnQoGZ@C+ISC+g4j;cppHxfp$uJHNAFU6VvEU%g|G~`=rPM9as(*y&Vi++ENO&a$J#4ne8d41GsHj$DnvW2UN78N5gd-+ue zbL^3Y^v#JpEUIKDP3&eT-Ly=1aaXUjl&EtFRZJc1tN2K1u2#mnoRw%@>9Ag-)=0^! z+W~N>65{9(14=pB8giZ^)5VrmWE_IW0=A3Gbs^c^#Vt`j+iVVz|Ijzq+H9vi(@cX{ ztCpS}yyeiexEf={&oHFP*s$ULJ^k^Kl!tq)<`fd@4%-P50%>_(L#KNl-HA0 z+K)U(%AGBC1tD&nBE}b)okXFDO{ao;`FI4k%v$`*My6GlKFvp~?*_?E$7T9yZvnei zcFPwG+Q@TzzTKup;19^gjeZf9?8zV1OQhs}<(rEu>1m#b8PvGM82ipddp2j($s}<= za&t*%5sNl4yZqID&r&dZ$kIRPlY!uZM4V!V=RAOXBMDv+Yi_)pKZBX}SJpVxY z2tL|0A5|)uTqY3>Bc7`?SFy)&P|RXYjE>b*-u)r>HuHR;{w-!%X?srG^VwQI(?l6{kK>ZP3$Q+O^AzCBPCPjUZzLBo znE2u`)HHD*UmCZw7kyzQ*6Z02Ys%P(mD4$gf%NFJ?q2O$1WJiaC|+;>p852;j61iM zlkLT-Iy~^NZ~IxfM*pu*@c-Gp70?~OpVh5i_Hmkni;GXq(xT2RW~4!)<{?s{G;p;4 z(a1*&%#e&O=6BDP?&wtCztL$ptpP$Y?~5R#R;`oo;>|&B6AIGAoeLlS-nTR$yHrq- zM$7&*90iEg<);`iBO50B0<#gZ2#hRw+Ht=|j%Znx649H4#TEw|k0%e1VAOZd>3!Vl zejvB4`bl%()kofs#Vby?7+ermibluP_O1SSq|Y)@z{58e{e&3&N|C}p(@DbMq^m|q zr%1!*rF=@oA!+@~gIsRp-0*#=noE}H&nt;7RJvpCJmu{C^EuyDA`RTMlO;U@Sx&xz zB_9Y0YaN3V^==&$s(GSm0g;w_s6MDwlHhxk?rGzv~s}vT<7f6k#!$Pyr zN@9W*!bAxCi3kc~J7>dQ@tYjR?~|?3WkJ4E0WUGX)4>Y)bLE|{YM=t*$mzMfrltuFev!U8<`6GHijVw!)&De8So2^o7;`?4a>x1fhe|5@$d?j?;mO z+|(~{x8RSL$wDewZ$|2DD|z_bSftW43ntQgQ7Mp-%)bGeR>fi5vKWcaGcgsPA1L{*R_Z=pk5kU7ucPZ%>U!a{-r#U1D<447=)Na`FF~eFg%5S|*TatjGp@5B*BEU9R7%jwSX9z3V@IDVlbo(R76 zyC787atv<4HhaNH#YoC#_sodKJtXshyG4=NeQ2+5mHYH~UDdSa4Z9qn+1fMHggBux z&!4p0^5;KyG1kpj&u)SggqX~p7pBOBDZofDcI!9gq%0%HjHdhgeLiIj3mxXJnw08W zeb7V9`oF48Y?RqTrdz!pH?q`4(q-7ppWNCH%McCQnW-$OeuVUSO9kY~IDfG!Re#<5 zqMw1f_kuLVU@~AaAi^BW9qDtZSr**|AixJoFX?vpAervHm3h&^3`oB^?tJNcz5Fb( zn6@>Cn9<%fd{|L>w+|9iyYPe@eGpX#*UuC99Objq6NG-bPg zb=>|e%QL1(JTo?C4}-(3v|N*s*83bU`NuDj+Q%o^?< zncUo8ASQ_u0kymrgVYxoJ!9Xz6Bb^9t(SE8pJudq-Hr zd)39HpZH#qG+Nt}d7HqNeHeVO*svOZ!MDRQf`*9}zVD7tC4b-5 z_TrzMiiB-$uVoOX!cH@)n``I2ZW?b5=6-(|9`WZqJ#nxc%e9NBQvOavW;pF$ILz&U=hg#^G!(p`jrmEV7o+YyB(~ zLIp*<)@QL+jLhLYI0}u5p*yCiKFkxmIFcbL?0e#|y;&1%AxpAe8?sQp`nY6#PUF&O 
zpiPwjYNxy5l0+@>M3d!Dv=?^d^nBza8NQGGL5%1B*hcZV`7b0aukwwq0Er}f<#pt=s&-;&I!&RFpNhjn=13e}f^lf1lE%(44X zb1U%a%egOgr+NQsTe5Cd!kcfqC)X)0x9fUW|Ky_Er=lN^XUfL!o>g79(p~@AV&=?R~j!`T6hP`EI3K;1p0={86)cK~BzX=kN3X zf8?K(wPoXyS8o@W$5vFox|;I$(pzi0s`OQXOUiElVXy!Acx4*r?Z$TYbN>GWtNM@K zJIlPYRkyg-+HUWTOwXxzj%?fcDqiMhz>ljx949-=-i-Kh_1KBUKX&esw4a``^RJ>* zXwhtT%ei{n#FzEH|C;yZ>+$!u_x#*+`=L8{b9SH^9&27u3G_Gxqxe`L2UJtdxghk z&-wzDFvLvW{chK5u3{n6GSKKy!P&C6w^IFpbD0bcp^A{{2lcLh_DXj@ybtYvc^;(2 M)78&qol`;+0Fu7JivR!s literal 0 HcmV?d00001 diff --git a/docs/output.md b/docs/output.md index c3889e8f..266a800b 100644 --- a/docs/output.md +++ b/docs/output.md @@ -10,50 +10,59 @@ The directories listed below will be created in the results directory after the ## Pipeline overview -The pipeline is built using [Nextflow](https://www.nextflow.io/) -and processes data using the following steps: +The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps: -* [FastQC](#fastqc) - Read quality control -* [MultiQC](#multiqc) - Aggregate report describing results from the whole pipeline +* [FastQC](#fastqc) - Raw read QC +* [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline * [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution -## FastQC +### FastQC -[FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences. +
+Output files -For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). +* `fastqc/` + * `*_fastqc.html`: FastQC report containing quality metrics. + * `*_fastqc.zip`: Zip archive containing the FastQC report, tab-delimited data file and plot images. -**Output files:** +
-* `fastqc/` - * `*_fastqc.html`: FastQC report containing quality metrics for your untrimmed raw fastq files. -* `fastqc/zips/` - * `*_fastqc.zip`: Zip archive containing the FastQC report, tab-delimited data file and plot images. +[FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences. For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). -> **NB:** The FastQC plots displayed in the MultiQC report shows _untrimmed_ reads. They may contain adapter sequence and potentially regions with low quality. +![MultiQC - FastQC sequence counts plot](images/mqc_fastqc_counts.png) -## MultiQC +![MultiQC - FastQC mean quality scores plot](images/mqc_fastqc_quality.png) -[MultiQC](http://multiqc.info) is a visualization tool that generates a single HTML report summarizing all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. +![MultiQC - FastQC adapter content plot](images/mqc_fastqc_adapter.png) -The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability. +> **NB:** The FastQC plots displayed in the MultiQC report shows _untrimmed_ reads. They may contain adapter sequence and potentially regions with low quality. -For more information about how to use MultiQC reports, see [https://multiqc.info](https://multiqc.info). +### MultiQC -**Output files:** +
+Output files * `multiqc/` - * `multiqc_report.html`: a standalone HTML file that can be viewed in your web browser. - * `multiqc_data/`: directory containing parsed statistics from the different tools used in the pipeline. - * `multiqc_plots/`: directory containing static images from the report in various formats. + * `multiqc_report.html`: a standalone HTML file that can be viewed in your web browser. + * `multiqc_data/`: directory containing parsed statistics from the different tools used in the pipeline. + * `multiqc_plots/`: directory containing static images from the report in various formats. -## Pipeline information +
-[Nextflow](https://www.nextflow.io/docs/latest/tracing.html) provides excellent functionality for generating various reports relevant to the running and execution of the pipeline. This will allow you to troubleshoot errors with the running of the pipeline, and also provide you with other information such as launch commands, run times and resource usage. +[MultiQC](http://multiqc.info) is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. -**Output files:** +Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQC. The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see . + +### Pipeline information + +
+Output files * `pipeline_info/` - * Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`. - * Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.csv`. - * Documentation for interpretation of results in HTML format: `results_description.html`. + * Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`. + * Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.tsv`. + * Reformatted samplesheet files used as input to the pipeline: `samplesheet.valid.csv`. + +
+ +[Nextflow](https://www.nextflow.io/docs/latest/tracing.html) provides excellent functionality for generating various reports relevant to the running and execution of the pipeline. This will allow you to troubleshoot errors with the running of the pipeline, and also provide you with other information such as launch commands, run times and resource usage. diff --git a/docs/usage.md b/docs/usage.md index f1549659..6b8e22e0 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -8,19 +8,63 @@ +## Samplesheet input + +You will need to create a samplesheet with information about the samples you would like to analyse before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row as shown in the examples below. + +```console +--input '[path to samplesheet file]' +``` + +### Multiple runs of the same sample + +The `sample` identifiers have to be the same when you have re-sequenced the same sample more than once e.g. to increase sequencing depth. The pipeline will concatenate the raw reads before performing any downstream analysis. Below is an example for the same sample sequenced across 3 lanes: + +```console +sample,fastq_1,fastq_2 +CONTROL_REP1,AEG588A1_S1_L002_R1_001.fastq.gz,AEG588A1_S1_L002_R2_001.fastq.gz +CONTROL_REP1,AEG588A1_S1_L003_R1_001.fastq.gz,AEG588A1_S1_L003_R2_001.fastq.gz +CONTROL_REP1,AEG588A1_S1_L004_R1_001.fastq.gz,AEG588A1_S1_L004_R2_001.fastq.gz +``` + +### Full samplesheet + +The pipeline will auto-detect whether a sample is single- or paired-end using the information provided in the samplesheet. The samplesheet can have as many columns as you desire, however, there is a strict requirement for the first 3 columns to match those defined in the table below. + +A final samplesheet file consisting of both single- and paired-end data may look something like the one below. This is for 6 samples, where `TREATMENT_REP3` has been sequenced twice. 
+ +```console +sample,fastq_1,fastq_2 +CONTROL_REP1,AEG588A1_S1_L002_R1_001.fastq.gz,AEG588A1_S1_L002_R2_001.fastq.gz +CONTROL_REP2,AEG588A2_S2_L002_R1_001.fastq.gz,AEG588A2_S2_L002_R2_001.fastq.gz +CONTROL_REP3,AEG588A3_S3_L002_R1_001.fastq.gz,AEG588A3_S3_L002_R2_001.fastq.gz +TREATMENT_REP1,AEG588A4_S4_L003_R1_001.fastq.gz, +TREATMENT_REP2,AEG588A5_S5_L003_R1_001.fastq.gz, +TREATMENT_REP3,AEG588A6_S6_L003_R1_001.fastq.gz, +TREATMENT_REP3,AEG588A6_S6_L004_R1_001.fastq.gz, +``` + +| Column | Description | +|----------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `sample` | Custom sample name. This entry will be identical for multiple sequencing libraries/runs from the same sample. Spaces in sample names are automatically converted to underscores (`_`). | +| `fastq_1` | Full path to FastQ file for Illumina short reads 1. File has to be gzipped and have the extension ".fastq.gz" or ".fq.gz". | +| `fastq_2` | Full path to FastQ file for Illumina short reads 2. File has to be gzipped and have the extension ".fastq.gz" or ".fq.gz". | + +An [example samplesheet](../assets/samplesheet.csv) has been provided with the pipeline. + ## Running the pipeline The typical command for running the pipeline is as follows: -```bash -nextflow run nf-core/mhcquant --input '*_R{1,2}.fastq.gz' -profile docker +```console +nextflow run nf-core/mhcquant --input samplesheet.csv --genome GRCh37 -profile docker ``` This will launch the pipeline with the `docker` configuration profile. See below for more information about profiles. 
Note that the pipeline will create the following files in your working directory: -```bash +```console work # Directory containing the nextflow working files results # Finished results (configurable, see below) .nextflow_log # Log file from Nextflow @@ -31,13 +75,13 @@ results # Finished results (configurable, see below) When you run the above command, Nextflow automatically pulls the pipeline code from GitHub and stores it as a cached version. When running the pipeline after this, it will always use the cached version if available - even if the pipeline has been updated since. To make sure that you're running the latest version of the pipeline, make sure that you regularly update the cached version of the pipeline: -```bash +```console nextflow pull nf-core/mhcquant ``` ### Reproducibility -It's a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. +It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. First, go to the [nf-core/mhcquant releases page](https://github.com/nf-core/mhcquant/releases) and find the latest version number - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. @@ -51,7 +95,7 @@ This version number will be logged in reports when you run the pipeline, so that Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. 
-Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below. +Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below. When using Biocontainers, most of these software packaging methods pull Docker containers from quay.io e.g. [FastQC](https://quay.io/repository/biocontainers/fastqc) except for Singularity which directly downloads Singularity images via https hosted by the [Galaxy project](https://depot.galaxyproject.org/singularity/) and Conda which downloads and installs software locally from [Bioconda](https://bioconda.github.io/). > We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. @@ -63,27 +107,20 @@ They are loaded in sequence, so later profiles can overwrite earlier profiles. If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended. 
* `docker` - * A generic configuration profile to be used with [Docker](https://docker.com/) - * Pulls software from Docker Hub: [`nfcore/mhcquant`](https://hub.docker.com/r/nfcore/mhcquant/) + * A generic configuration profile to be used with [Docker](https://docker.com/) * `singularity` - * A generic configuration profile to be used with [Singularity](https://sylabs.io/docs/) - * Pulls software from Docker Hub: [`nfcore/mhcquant`](https://hub.docker.com/r/nfcore/mhcquant/) + * A generic configuration profile to be used with [Singularity](https://sylabs.io/docs/) * `podman` - * A generic configuration profile to be used with [Podman](https://podman.io/) - * Pulls software from Docker Hub: [`nfcore/mhcquant`](https://hub.docker.com/r/nfcore/mhcquant/) + * A generic configuration profile to be used with [Podman](https://podman.io/) * `shifter` - * A generic configuration profile to be used with [Shifter](https://nersc.gitlab.io/development/shifter/how-to-use/) - * Pulls software from Docker Hub: [`nfcore/mhcquant`](https://hub.docker.com/r/nfcore/mhcquant/) + * A generic configuration profile to be used with [Shifter](https://nersc.gitlab.io/development/shifter/how-to-use/) * `charliecloud` - * A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) - * Pulls software from Docker Hub: [`nfcore/mhcquant`](https://hub.docker.com/r/nfcore/mhcquant/) + * A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) * `conda` - * Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud. - * A generic configuration profile to be used with [Conda](https://conda.io/docs/) - * Pulls most software from [Bioconda](https://bioconda.github.io/) + * A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. 
when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud. * `test` - * A profile with a complete configuration for automated testing - * Includes links to test data so needs no other parameters + * A profile with a complete configuration for automated testing + * Includes links to test data so needs no other parameters ### `-resume` @@ -95,29 +132,140 @@ You can also supply a run name to resume a specific run: `-resume [run-name]`. U Specify the path to a specific config file (this is a core Nextflow command). See the [nf-core website documentation](https://nf-co.re/usage/configuration) for more information. -#### Custom resource requests +## Custom configuration + +### Resource requests + +Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped. 
+ +For example, if the nf-core/rnaseq pipeline is failing after multiple re-submissions of the `STAR_ALIGN` process due to an exit code of `137` this would indicate that there is an out of memory issue: + +```console +[62/149eb0] NOTE: Process `RNASEQ:ALIGN_STAR:STAR_ALIGN (WT_REP1)` terminated with an error exit status (137) -- Execution is retried (1) +Error executing process > 'RNASEQ:ALIGN_STAR:STAR_ALIGN (WT_REP1)' + +Caused by: + Process `RNASEQ:ALIGN_STAR:STAR_ALIGN (WT_REP1)` terminated with an error exit status (137) -Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with an error code of `143` (exceeded requested resources) it will automatically resubmit with higher requests (2 x original, then 3 x original). If it still fails after three times then the pipeline is stopped. +Command executed: + STAR \ + --genomeDir star \ + --readFilesIn WT_REP1_trimmed.fq.gz \ + --runThreadN 2 \ + --outFileNamePrefix WT_REP1. \ + -Whilst these default requirements will hopefully work for most people with most data, you may find that you want to customise the compute resources that the pipeline requests. You can do this by creating a custom config file. For example, to give the workflow process `star` 32GB of memory, you could use the following config: +Command exit status: + 137 + +Command output: + (empty) + +Command error: + .command.sh: line 9: 30 Killed STAR --genomeDir star --readFilesIn WT_REP1_trimmed.fq.gz --runThreadN 2 --outFileNamePrefix WT_REP1. +Work dir: + /home/pipelinetest/work/9d/172ca5881234073e8d76f2a19c88fb + +Tip: you can replicate the issue by changing to the process work dir and entering the command `bash .command.run` +``` + +To bypass this error you would need to find exactly which resources are set by the `STAR_ALIGN` process. 
The quickest way is to search for `process STAR_ALIGN` in the [nf-core/rnaseq Github repo](https://github.com/nf-core/rnaseq/search?q=process+STAR_ALIGN). We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so based on the search results the file we want is `modules/nf-core/software/star/align/main.nf`. If you click on the link to that file you will notice that there is a `label` directive at the top of the module that is set to [`label process_high`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L9). The [Nextflow `label`](https://www.nextflow.io/docs/latest/process.html#label) directive allows us to organise workflow processes in separate groups which can be referenced in a configuration file to select and configure subset of processes having similar computing requirements. The default values for the `process_high` label are set in the pipeline's [`base.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L33-L37) which in this case is defined as 72GB. Providing you haven't set any other standard nf-core parameters to __cap__ the [maximum resources](https://nf-co.re/usage/configuration#max-resources) used by the pipeline then we can try and bypass the `STAR_ALIGN` process failure by creating a custom config file that sets at least 72GB of memory, in this case increased to 100GB. The custom config below can then be provided to the pipeline via the [`-c`](#-c) parameter as highlighted in previous sections. ```nextflow process { - withName: star { - memory = 32.GB - } + withName: STAR_ALIGN { + memory = 100.GB + } } ``` -To find the exact name of a process you wish to modify the compute resources, check the live-status of a nextflow run displayed on your terminal or check the nextflow error for a line like so: `Error executing process > 'bwa'`. 
In this case the name to specify in the custom config file is `bwa`. +> **NB:** We specify just the process name i.e. `STAR_ALIGN` in the config file and not the full task name string that is printed to screen in the error message or on the terminal whilst the pipeline is running i.e. `RNASEQ:ALIGN_STAR:STAR_ALIGN`. You may get a warning suggesting that the process selector isn't recognised but you can ignore that if the process name has been specified correctly. This is something that needs to be fixed upstream in core Nextflow. + +### Tool-specific options + +For the ultimate flexibility, we have implemented and are using Nextflow DSL2 modules in a way where it is possible for both developers and users to change tool-specific command-line arguments (e.g. providing an additional command-line argument to the `STAR_ALIGN` process) as well as publishing options (e.g. saving files produced by the `STAR_ALIGN` process that aren't saved by default by the pipeline). In the majority of instances, as a user you won't have to change the default options set by the pipeline developer(s), however, there may be edge cases where creating a simple custom config file can improve the behaviour of the pipeline if for example it is failing due to a weird error that requires setting a tool-specific parameter to deal with smaller / larger genomes. + +The command-line arguments passed to STAR in the `STAR_ALIGN` module are a combination of: + +* Mandatory arguments or those that need to be evaluated within the scope of the module, as supplied in the [`script`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L49-L55) section of the module file. 
+ +* An [`options.args`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L56) string of non-mandatory parameters that is set to be empty by default in the module but can be overwritten when including the module in the sub-workflow / workflow context via the `addParams` Nextflow option. + +The nf-core/rnaseq pipeline has a sub-workflow (see [terminology](https://github.com/nf-core/modules#terminology)) specifically to align reads with STAR and to sort, index and generate some basic stats on the resulting BAM files using SAMtools. At the top of this file we import the `STAR_ALIGN` module via the Nextflow [`include`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/subworkflows/nf-core/align_star.nf#L10) keyword and by default the options passed to the module via the `addParams` option are set as an empty Groovy map [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/subworkflows/nf-core/align_star.nf#L5); this in turn means `options.args` will be set to empty by default in the module file too. This is an intentional design choice and allows us to implement well-written sub-workflows composed of a chain of tools that by default run with the bare minimum parameter set for any given tool in order to make it much easier to share across pipelines and to provide the flexibility for users and developers to customise any non-mandatory arguments. + +When including the sub-workflow above in the main pipeline workflow we use the same `include` statement, however, we now have the ability to overwrite options for each of the tools in the sub-workflow including the [`align_options`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/workflows/rnaseq.nf#L225) variable that will be used specifically to overwrite the optional arguments passed to the `STAR_ALIGN` module. 
In this case, the options to be provided to `STAR_ALIGN` have been assigned sensible defaults by the developer(s) in the pipeline's [`modules.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/modules.config#L70-L74) and can be accessed and customised in the [workflow context](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/workflows/rnaseq.nf#L201-L204) too before eventually passing them to the sub-workflow as a Groovy map called `star_align_options`. These options will then be propagated from `workflow -> sub-workflow -> module`. + +As mentioned at the beginning of this section it may also be necessary for users to overwrite the options passed to modules to be able to customise specific aspects of the way in which a particular tool is executed by the pipeline. Given that all of the default module options are stored in the pipeline's `modules.config` as a [`params` variable](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/modules.config#L24-L25) it is also possible to overwrite any of these options via a custom config file. + +Say for example we want to append an additional, non-mandatory parameter (i.e. `--outFilterMismatchNmax 16`) to the arguments passed to the `STAR_ALIGN` module. Firstly, we need to copy across the default `args` specified in the [`modules.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/modules.config#L71) and create a custom config file that is a composite of the default `args` as well as the additional options you would like to provide. This is very important because Nextflow will overwrite the default value of `args` that you provide via the custom config. + +As you will see in the example below, we have: + +* appended `--outFilterMismatchNmax 16` to the default `args` used by the module. 
+* changed the default `publish_dir` value to where the files will eventually be published in the main results directory. +* appended `'bam':''` to the default value of `publish_files` so that the BAM files generated by the process will also be saved in the top-level results directory for the module. Note: `'out':'log'` means any file/directory ending in `out` will now be saved in a separate directory called `my_star_directory/log/`. + +```nextflow +params { + modules { + 'star_align' { + args = "--quantMode TranscriptomeSAM --twopassMode Basic --outSAMtype BAM Unsorted --readFilesCommand zcat --runRNGseed 0 --outFilterMultimapNmax 20 --alignSJDBoverhangMin 1 --outSAMattributes NH HI AS NM MD --quantTranscriptomeBan Singleend --outFilterMismatchNmax 16" + publish_dir = "my_star_directory" + publish_files = ['out':'log', 'tab':'log', 'bam':''] + } + } +} +``` + +### Updating containers + +The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementation of this pipeline uses one container per process which makes it much easier to maintain and update software dependencies. If for some reason you need to use a different version of a particular tool with the pipeline then you just need to identify the `process` name and override the Nextflow `container` definition for that process using the `withName` declaration. For example, in the [nf-core/viralrecon](https://nf-co.re/viralrecon) pipeline a tool called [Pangolin](https://github.com/cov-lineages/pangolin) has been used during the COVID-19 pandemic to assign lineages to SARS-CoV-2 genome sequenced samples. Given that the lineage assignments change quite frequently it doesn't make sense to re-release the nf-core/viralrecon every time a new version of Pangolin has been released. However, you can override the default container used by the pipeline by creating a custom config file and passing it as a command-line argument via `-c custom.config`. + +1. 
Check the default version used by the pipeline in the module file for [Pangolin](https://github.com/nf-core/viralrecon/blob/a85d5969f9025409e3618d6c280ef15ce417df65/modules/nf-core/software/pangolin/main.nf#L14-L19) +2. Find the latest version of the Biocontainer available on [Quay.io](https://quay.io/repository/biocontainers/pangolin?tag=latest&tab=tags) +3. Create the custom config accordingly: + + * For Docker: + + ```nextflow + process { + withName: PANGOLIN { + container = 'quay.io/biocontainers/pangolin:3.0.5--pyhdfd78af_0' + } + } + ``` + + * For Singularity: + + ```nextflow + process { + withName: PANGOLIN { + container = 'https://depot.galaxyproject.org/singularity/pangolin:3.0.5--pyhdfd78af_0' + } + } + ``` + + * For Conda: + + ```nextflow + process { + withName: PANGOLIN { + conda = 'bioconda::pangolin=3.0.5' + } + } + ``` + +> **NB:** If you wish to periodically update individual tool-specific results (e.g. Pangolin) generated by the pipeline then you must ensure to keep the `work/` directory otherwise the `-resume` ability of the pipeline will be compromised and it will restart from scratch. + +### nf-core/configs -See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html) for more information. +In most cases, you will only need to create a custom config as a one-off but if you and others within your organisation are likely to be running nf-core pipelines regularly and need to use the same settings regularly it may be a good idea to request that your custom config file is uploaded to the `nf-core/configs` git repository. Before you do this please can you test that the config file works with your pipeline of choice using the `-c` parameter. 
You can then create a pull request to the `nf-core/configs` repository with the addition of your config file, associated documentation file (see examples in [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs)), and amending [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config) to include your custom profile. -If you are likely to be running `nf-core` pipelines regularly it may be a good idea to request that your custom config file is uploaded to the `nf-core/configs` git repository. Before you do this please can you test that the config file works with your pipeline of choice using the `-c` parameter (see definition above). You can then create a pull request to the `nf-core/configs` repository with the addition of your config file, associated documentation file (see examples in [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs)), and amending [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config) to include your custom profile. +See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html) for more information about creating your own configuration files. If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack) on the [`#configs` channel](https://nfcore.slack.com/channels/configs). -### Running in the background +## Running in the background Nextflow handles job submissions and supervises the running jobs. The Nextflow process must run until the pipeline is finished. @@ -126,11 +274,11 @@ The Nextflow `-bg` flag launches Nextflow in the background, detached from your Alternatively, you can use `screen` / `tmux` or similar tool to create a detached session which you can log back into at a later time. Some HPC setups also allow you to run nextflow within a cluster job submitted your job scheduler (from where it submits more jobs). 
-#### Nextflow memory requirements +## Nextflow memory requirements In some cases, the Nextflow Java virtual machines can start to request a large amount of memory. We recommend adding the following line to your environment to limit this (typically in `~/.bashrc` or `~./bash_profile`): -```bash +```console NXF_OPTS='-Xms1g -Xmx4g' ``` diff --git a/environment.yml b/environment.yml deleted file mode 100644 index 116c3755..00000000 --- a/environment.yml +++ /dev/null @@ -1,15 +0,0 @@ -# You can use this file to create a conda environment for this pipeline: -# conda env create -f environment.yml -name: nf-core-mhcquant-1.6.1 -channels: - - conda-forge - - bioconda - - defaults -dependencies: - - conda-forge::python=3.7.3 - - conda-forge::markdown=3.1.1 - - conda-forge::pymdown-extensions=6.0 - - conda-forge::pygments=2.5.2 - # TODO nf-core: Add required software dependencies here - - bioconda::fastqc=0.11.8 - - bioconda::multiqc=1.7 diff --git a/lib/Headers.groovy b/lib/Headers.groovy deleted file mode 100644 index 15d1d388..00000000 --- a/lib/Headers.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * This file holds several functions used to render the nf-core ANSI header. - */ - -class Headers { - - private static Map log_colours(Boolean monochrome_logs) { - Map colorcodes = [:] - colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" - colorcodes['dim'] = monochrome_logs ? '' : "\033[2m" - colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" - colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" - colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" - colorcodes['yellow_bold'] = monochrome_logs ? '' : "\033[1;93m" - colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" - colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" - colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" - colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" - colorcodes['red'] = monochrome_logs ? 
'' : "\033[1;91m" - return colorcodes - } - - static String dashed_line(monochrome_logs) { - Map colors = log_colours(monochrome_logs) - return "-${colors.dim}----------------------------------------------------${colors.reset}-" - } - - static String nf_core(workflow, monochrome_logs) { - Map colors = log_colours(monochrome_logs) - String.format( - """\n - ${dashed_line(monochrome_logs)} - ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} - ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} - ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} - ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} v${workflow.manifest.version}${colors.reset} - ${dashed_line(monochrome_logs)} - """.stripIndent() - ) - } -} diff --git a/lib/NfcoreSchema.groovy b/lib/NfcoreSchema.groovy old mode 100644 new mode 100755 index 52ee7304..8d6920dd --- a/lib/NfcoreSchema.groovy +++ b/lib/NfcoreSchema.groovy @@ -1,6 +1,6 @@ -/* - * This file holds several functions used to perform JSON parameter validation, help and summary rendering for the nf-core pipeline template. - */ +// +// This file holds several functions used to perform JSON parameter validation, help and summary rendering for the nf-core pipeline template. 
+// import org.everit.json.schema.Schema import org.everit.json.schema.loader.SchemaLoader @@ -13,16 +13,23 @@ import groovy.json.JsonBuilder class NfcoreSchema { - /* - * Function to loop over all parameters defined in schema and check - * whether the given paremeters adhere to the specificiations - */ + // + // Resolve Schema path relative to main workflow directory + // + public static String getSchemaPath(workflow, schema_filename='nextflow_schema.json') { + return "${workflow.projectDir}/${schema_filename}" + } + + // + // Function to loop over all parameters defined in schema and check + // whether the given parameters adhere to the specifications + // /* groovylint-disable-next-line UnusedPrivateMethodParameter */ - private static void validateParameters(params, jsonSchema, log) { + public static void validateParameters(workflow, params, log, schema_filename='nextflow_schema.json') { def has_error = false //=====================================================================// // Check for nextflow core params and unexpected params - def json = new File(jsonSchema).text + def json = new File(getSchemaPath(workflow, schema_filename=schema_filename)).text def Map schemaParams = (Map) new JsonSlurper().parseText(json).get('definitions') def nf_params = [ // Options for base `nextflow` command @@ -114,7 +121,8 @@ class NfcoreSchema { def params_ignore = params.schema_ignore_params.split(',') + 'schema_ignore_params' def expectedParamsLowerCase = expectedParams.collect{ it.replace("-", "").toLowerCase() } def specifiedParamLowerCase = specifiedParam.replace("-", "").toLowerCase() - if (!expectedParams.contains(specifiedParam) && !params_ignore.contains(specifiedParam) && !expectedParamsLowerCase.contains(specifiedParamLowerCase)) { + def isCamelCaseBug = (specifiedParam.contains("-") && !expectedParams.contains(specifiedParam) && expectedParamsLowerCase.contains(specifiedParamLowerCase)) + if (!expectedParams.contains(specifiedParam) && 
!params_ignore.contains(specifiedParam) && !isCamelCaseBug) { // Temporarily remove camelCase/camel-case params #1035 def unexpectedParamsLowerCase = unexpectedParams.collect{ it.replace("-", "").toLowerCase()} if (!unexpectedParamsLowerCase.contains(specifiedParamLowerCase)){ @@ -125,36 +133,36 @@ class NfcoreSchema { //=====================================================================// // Validate parameters against the schema - InputStream inputStream = new File(jsonSchema).newInputStream() - JSONObject rawSchema = new JSONObject(new JSONTokener(inputStream)) + InputStream input_stream = new File(getSchemaPath(workflow, schema_filename=schema_filename)).newInputStream() + JSONObject raw_schema = new JSONObject(new JSONTokener(input_stream)) // Remove anything that's in params.schema_ignore_params - rawSchema = removeIgnoredParams(rawSchema, params) + raw_schema = removeIgnoredParams(raw_schema, params) - Schema schema = SchemaLoader.load(rawSchema) + Schema schema = SchemaLoader.load(raw_schema) // Clean the parameters def cleanedParams = cleanParameters(params) // Convert to JSONObject def jsonParams = new JsonBuilder(cleanedParams) - JSONObject paramsJSON = new JSONObject(jsonParams.toString()) + JSONObject params_json = new JSONObject(jsonParams.toString()) // Validate try { - schema.validate(paramsJSON) + schema.validate(params_json) } catch (ValidationException e) { println '' log.error 'ERROR: Validation of pipeline parameters failed!' 
JSONObject exceptionJSON = e.toJSON() - printExceptions(exceptionJSON, paramsJSON, log) + printExceptions(exceptionJSON, params_json, log) println '' has_error = true } // Check for unexpected parameters if (unexpectedParams.size() > 0) { - Map colors = log_colours(params.monochrome_logs) + Map colors = NfcoreTemplate.logColours(params.monochrome_logs) println '' def warn_msg = 'Found unexpected parameters:' for (unexpectedParam in unexpectedParams) { @@ -170,266 +178,17 @@ class NfcoreSchema { } } - // Loop over nested exceptions and print the causingException - private static void printExceptions(exJSON, paramsJSON, log) { - def causingExceptions = exJSON['causingExceptions'] - if (causingExceptions.length() == 0) { - def m = exJSON['message'] =~ /required key \[([^\]]+)\] not found/ - // Missing required param - if (m.matches()) { - log.error "* Missing required parameter: --${m[0][1]}" - } - // Other base-level error - else if (exJSON['pointerToViolation'] == '#') { - log.error "* ${exJSON['message']}" - } - // Error with specific param - else { - def param = exJSON['pointerToViolation'] - ~/^#\// - def param_val = paramsJSON[param].toString() - log.error "* --${param}: ${exJSON['message']} (${param_val})" - } - } - for (ex in causingExceptions) { - printExceptions(ex, paramsJSON, log) - } - } - - // Remove an element from a JSONArray - private static JSONArray removeElement(jsonArray, element){ - def list = [] - int len = jsonArray.length() - for (int i=0;i - if(rawSchema.keySet().contains('definitions')){ - rawSchema.definitions.each { definition -> - for (key in definition.keySet()){ - if (definition[key].get("properties").keySet().contains(ignore_param)){ - // Remove the param to ignore - definition[key].get("properties").remove(ignore_param) - // If the param was required, change this - if (definition[key].has("required")) { - def cleaned_required = removeElement(definition[key].required, ignore_param) - definition[key].put("required", cleaned_required) - 
} - } - } - } - } - if(rawSchema.keySet().contains('properties') && rawSchema.get('properties').keySet().contains(ignore_param)) { - rawSchema.get("properties").remove(ignore_param) - } - if(rawSchema.keySet().contains('required') && rawSchema.required.contains(ignore_param)) { - def cleaned_required = removeElement(rawSchema.required, ignore_param) - rawSchema.put("required", cleaned_required) - } - } - return rawSchema - } - - private static Map cleanParameters(params) { - def new_params = params.getClass().newInstance(params) - for (p in params) { - // remove anything evaluating to false - if (!p['value']) { - new_params.remove(p.key) - } - // Cast MemoryUnit to String - if (p['value'].getClass() == nextflow.util.MemoryUnit) { - new_params.replace(p.key, p['value'].toString()) - } - // Cast Duration to String - if (p['value'].getClass() == nextflow.util.Duration) { - new_params.replace(p.key, p['value'].toString().replaceFirst(/d(?!\S)/, "day")) - } - // Cast LinkedHashMap to String - if (p['value'].getClass() == LinkedHashMap) { - new_params.replace(p.key, p['value'].toString()) - } - } - return new_params - } - - /* - * This method tries to read a JSON params file - */ - private static LinkedHashMap params_load(String json_schema) { - def params_map = new LinkedHashMap() - try { - params_map = params_read(json_schema) - } catch (Exception e) { - println "Could not read parameters settings from JSON. $e" - params_map = new LinkedHashMap() - } - return params_map - } - - private static Map log_colours(Boolean monochrome_logs) { - Map colorcodes = [:] - - // Reset / Meta - colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" - colorcodes['bold'] = monochrome_logs ? '' : "\033[1m" - colorcodes['dim'] = monochrome_logs ? '' : "\033[2m" - colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m" - colorcodes['blink'] = monochrome_logs ? '' : "\033[5m" - colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m" - colorcodes['hidden'] = monochrome_logs ? 
'' : "\033[8m" - - // Regular Colors - colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" - colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m" - colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" - colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" - colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" - colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" - colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" - colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" - - // Bold - colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" - colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" - colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" - colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" - colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" - colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" - colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" - colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" - - // Underline - colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" - colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" - colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" - colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" - colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" - colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" - colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" - colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" - - // High Intensity - colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" - colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" - colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" - colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" - colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" - colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" - colorcodes['icyan'] = monochrome_logs ? 
'' : "\033[0;96m" - colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" - - // Bold High Intensity - colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" - colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" - colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" - colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" - colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" - colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" - colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" - colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m" - - return colorcodes - } - - static String dashed_line(monochrome_logs) { - Map colors = log_colours(monochrome_logs) - return "-${colors.dim}----------------------------------------------------${colors.reset}-" - } - - /* - Method to actually read in JSON file using Groovy. - Group (as Key), values are all parameters - - Parameter1 as Key, Description as Value - - Parameter2 as Key, Description as Value - .... 
- Group - - - */ - private static LinkedHashMap params_read(String json_schema) throws Exception { - def json = new File(json_schema).text - def Map schema_definitions = (Map) new JsonSlurper().parseText(json).get('definitions') - def Map schema_properties = (Map) new JsonSlurper().parseText(json).get('properties') - /* Tree looks like this in nf-core schema - * definitions <- this is what the first get('definitions') gets us - group 1 - title - description - properties - parameter 1 - type - description - parameter 2 - type - description - group 2 - title - description - properties - parameter 1 - type - description - * properties <- parameters can also be ungrouped, outside of definitions - parameter 1 - type - description - */ - - // Grouped params - def params_map = new LinkedHashMap() - schema_definitions.each { key, val -> - def Map group = schema_definitions."$key".properties // Gets the property object of the group - def title = schema_definitions."$key".title - def sub_params = new LinkedHashMap() - group.each { innerkey, value -> - sub_params.put(innerkey, value) - } - params_map.put(title, sub_params) - } - - // Ungrouped params - def ungrouped_params = new LinkedHashMap() - schema_properties.each { innerkey, value -> - ungrouped_params.put(innerkey, value) - } - params_map.put("Other parameters", ungrouped_params) - - return params_map - } - - /* - * Get maximum number of characters across all parameter names - */ - private static Integer params_max_chars(params_map) { - Integer max_chars = 0 - for (group in params_map.keySet()) { - def group_params = params_map.get(group) // This gets the parameters of that particular group - for (param in group_params.keySet()) { - if (param.size() > max_chars) { - max_chars = param.size() - } - } - } - return max_chars - } - - /* - * Beautify parameters for --help - */ - private static String params_help(workflow, params, json_schema, command) { - Map colors = log_colours(params.monochrome_logs) + // + // Beautify 
parameters for --help + // + public static String paramsHelp(workflow, params, command, schema_filename='nextflow_schema.json') { + Map colors = NfcoreTemplate.logColours(params.monochrome_logs) Integer num_hidden = 0 String output = '' output += 'Typical pipeline command:\n\n' output += " ${colors.cyan}${command}${colors.reset}\n\n" - Map params_map = params_load(json_schema) - Integer max_chars = params_max_chars(params_map) + 1 + Map params_map = paramsLoad(getSchemaPath(workflow, schema_filename=schema_filename)) + Integer max_chars = paramsMaxChars(params_map) + 1 Integer desc_indent = max_chars + 14 Integer dec_linewidth = 160 - desc_indent for (group in params_map.keySet()) { @@ -469,18 +228,17 @@ class NfcoreSchema { output += group_output } } - output += dashed_line(params.monochrome_logs) if (num_hidden > 0){ - output += colors.dim + "\n Hiding $num_hidden params, use --show_hidden_params to show.\n" + colors.reset - output += dashed_line(params.monochrome_logs) + output += colors.dim + "!! 
Hiding $num_hidden params, use --show_hidden_params to show them !!\n" + colors.reset } + output += NfcoreTemplate.dashedLine(params.monochrome_logs) return output } - /* - * Groovy Map summarising parameters/workflow options used by the pipeline - */ - private static LinkedHashMap params_summary_map(workflow, params, json_schema) { + // + // Groovy Map summarising parameters/workflow options used by the pipeline + // + public static LinkedHashMap paramsSummaryMap(workflow, params, schema_filename='nextflow_schema.json') { // Get a selection of core Nextflow workflow options def Map workflow_summary = [:] if (workflow.revision) { @@ -503,7 +261,7 @@ class NfcoreSchema { // Get pipeline parameters defined in JSON Schema def Map params_summary = [:] def blacklist = ['hostnames'] - def params_map = params_load(json_schema) + def params_map = paramsLoad(getSchemaPath(workflow, schema_filename=schema_filename)) for (group in params_map.keySet()) { def sub_params = new LinkedHashMap() def group_params = params_map.get(group) // This gets the parameters of that particular group @@ -546,14 +304,14 @@ class NfcoreSchema { return [ 'Core Nextflow options' : workflow_summary ] << params_summary } - /* - * Beautify parameters for summary and return as string - */ - private static String params_summary_log(workflow, params, json_schema) { - Map colors = log_colours(params.monochrome_logs) + // + // Beautify parameters for summary and return as string + // + public static String paramsSummaryLog(workflow, params) { + Map colors = NfcoreTemplate.logColours(params.monochrome_logs) String output = '' - def params_map = params_summary_map(workflow, params, json_schema) - def max_chars = params_max_chars(params_map) + def params_map = paramsSummaryMap(workflow, params) + def max_chars = paramsMaxChars(params_map) for (group in params_map.keySet()) { def group_params = params_map.get(group) // This gets the parameters of that particular group if (group_params) { @@ -564,10 +322,196 @@ 
class NfcoreSchema { output += '\n' } } - output += dashed_line(params.monochrome_logs) - output += colors.dim + "\n Only displaying parameters that differ from defaults.\n" + colors.reset - output += dashed_line(params.monochrome_logs) + output += "!! Only displaying parameters that differ from the pipeline defaults !!\n" + output += NfcoreTemplate.dashedLine(params.monochrome_logs) return output } + // + // Loop over nested exceptions and print the causingException + // + private static void printExceptions(ex_json, params_json, log) { + def causingExceptions = ex_json['causingExceptions'] + if (causingExceptions.length() == 0) { + def m = ex_json['message'] =~ /required key \[([^\]]+)\] not found/ + // Missing required param + if (m.matches()) { + log.error "* Missing required parameter: --${m[0][1]}" + } + // Other base-level error + else if (ex_json['pointerToViolation'] == '#') { + log.error "* ${ex_json['message']}" + } + // Error with specific param + else { + def param = ex_json['pointerToViolation'] - ~/^#\// + def param_val = params_json[param].toString() + log.error "* --${param}: ${ex_json['message']} (${param_val})" + } + } + for (ex in causingExceptions) { + printExceptions(ex, params_json, log) + } + } + + // + // Remove an element from a JSONArray + // + private static JSONArray removeElement(json_array, element) { + def list = [] + int len = json_array.length() + for (int i=0;i + if(raw_schema.keySet().contains('definitions')){ + raw_schema.definitions.each { definition -> + for (key in definition.keySet()){ + if (definition[key].get("properties").keySet().contains(ignore_param)){ + // Remove the param to ignore + definition[key].get("properties").remove(ignore_param) + // If the param was required, change this + if (definition[key].has("required")) { + def cleaned_required = removeElement(definition[key].required, ignore_param) + definition[key].put("required", cleaned_required) + } + } + } + } + } + if(raw_schema.keySet().contains('properties') 
&& raw_schema.get('properties').keySet().contains(ignore_param)) { + raw_schema.get("properties").remove(ignore_param) + } + if(raw_schema.keySet().contains('required') && raw_schema.required.contains(ignore_param)) { + def cleaned_required = removeElement(raw_schema.required, ignore_param) + raw_schema.put("required", cleaned_required) + } + } + return raw_schema + } + + // + // Clean and check parameters relative to Nextflow native classes + // + private static Map cleanParameters(params) { + def new_params = params.getClass().newInstance(params) + for (p in params) { + // remove anything evaluating to false + if (!p['value']) { + new_params.remove(p.key) + } + // Cast MemoryUnit to String + if (p['value'].getClass() == nextflow.util.MemoryUnit) { + new_params.replace(p.key, p['value'].toString()) + } + // Cast Duration to String + if (p['value'].getClass() == nextflow.util.Duration) { + new_params.replace(p.key, p['value'].toString().replaceFirst(/d(?!\S)/, "day")) + } + // Cast LinkedHashMap to String + if (p['value'].getClass() == LinkedHashMap) { + new_params.replace(p.key, p['value'].toString()) + } + } + return new_params + } + + // + // This function tries to read a JSON params file + // + private static LinkedHashMap paramsLoad(String json_schema) { + def params_map = new LinkedHashMap() + try { + params_map = paramsRead(json_schema) + } catch (Exception e) { + println "Could not read parameters settings from JSON. $e" + params_map = new LinkedHashMap() + } + return params_map + } + + // + // Method to actually read in JSON file using Groovy. + // Group (as Key), values are all parameters + // - Parameter1 as Key, Description as Value + // - Parameter2 as Key, Description as Value + // .... 
+ // Group + // - + private static LinkedHashMap paramsRead(String json_schema) throws Exception { + def json = new File(json_schema).text + def Map schema_definitions = (Map) new JsonSlurper().parseText(json).get('definitions') + def Map schema_properties = (Map) new JsonSlurper().parseText(json).get('properties') + /* Tree looks like this in nf-core schema + * definitions <- this is what the first get('definitions') gets us + group 1 + title + description + properties + parameter 1 + type + description + parameter 2 + type + description + group 2 + title + description + properties + parameter 1 + type + description + * properties <- parameters can also be ungrouped, outside of definitions + parameter 1 + type + description + */ + + // Grouped params + def params_map = new LinkedHashMap() + schema_definitions.each { key, val -> + def Map group = schema_definitions."$key".properties // Gets the property object of the group + def title = schema_definitions."$key".title + def sub_params = new LinkedHashMap() + group.each { innerkey, value -> + sub_params.put(innerkey, value) + } + params_map.put(title, sub_params) + } + + // Ungrouped params + def ungrouped_params = new LinkedHashMap() + schema_properties.each { innerkey, value -> + ungrouped_params.put(innerkey, value) + } + params_map.put("Other parameters", ungrouped_params) + + return params_map + } + + // + // Get maximum number of characters across all parameter names + // + private static Integer paramsMaxChars(params_map) { + Integer max_chars = 0 + for (group in params_map.keySet()) { + def group_params = params_map.get(group) // This gets the parameters of that particular group + for (param in group_params.keySet()) { + if (param.size() > max_chars) { + max_chars = param.size() + } + } + } + return max_chars + } } diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy new file mode 100755 index 00000000..b6e689ec --- /dev/null +++ b/lib/NfcoreTemplate.groovy @@ -0,0 +1,266 @@ +// +// This file 
holds several functions used within the nf-core pipeline template. +// + +import org.yaml.snakeyaml.Yaml + +class NfcoreTemplate { + + // + // Check AWS Batch related parameters have been specified correctly + // + public static void awsBatch(workflow, params) { + if (workflow.profile.contains('awsbatch')) { + // Check params.awsqueue and params.awsregion have been set if running on AWSBatch + assert (params.awsqueue && params.awsregion) : "Specify correct --awsqueue and --awsregion parameters on AWSBatch!" + // Check outdir paths to be S3 buckets if running on AWSBatch + assert params.outdir.startsWith('s3:') : "Outdir not on S3 - specify S3 Bucket to run on AWSBatch!" + } + } + + // + // Check params.hostnames + // + public static void hostName(workflow, params, log) { + Map colors = logColours(params.monochrome_logs) + if (params.hostnames) { + def hostname = "hostname".execute().text.trim() + params.hostnames.each { prof, hnames -> + hnames.each { hname -> + if (hostname.contains(hname) && !workflow.profile.contains(prof)) { + log.info "=${colors.yellow}====================================================${colors.reset}=\n" + + "${colors.yellow}WARN: You are running with `-profile $workflow.profile`\n" + + " but your machine hostname is ${colors.white}'$hostname'${colors.reset}.\n" + + " ${colors.yellow_bold}Please use `-profile $prof${colors.reset}`\n" + + "=${colors.yellow}====================================================${colors.reset}=" + } + } + } + } + } + + // + // Construct and send completion email + // + public static void email(workflow, params, summary_params, projectDir, log, multiqc_report=[]) { + + // Set up the e-mail variables + def subject = "[$workflow.manifest.name] Successful: $workflow.runName" + if (!workflow.success) { + subject = "[$workflow.manifest.name] FAILED: $workflow.runName" + } + + def summary = [:] + for (group in summary_params.keySet()) { + summary << summary_params[group] + } + + def misc_fields = [:] + misc_fields['Date 
Started'] = workflow.start + misc_fields['Date Completed'] = workflow.complete + misc_fields['Pipeline script file path'] = workflow.scriptFile + misc_fields['Pipeline script hash ID'] = workflow.scriptId + if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository + if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId + if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision + misc_fields['Nextflow Version'] = workflow.nextflow.version + misc_fields['Nextflow Build'] = workflow.nextflow.build + misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp + + def email_fields = [:] + email_fields['version'] = workflow.manifest.version + email_fields['runName'] = workflow.runName + email_fields['success'] = workflow.success + email_fields['dateComplete'] = workflow.complete + email_fields['duration'] = workflow.duration + email_fields['exitStatus'] = workflow.exitStatus + email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + email_fields['errorReport'] = (workflow.errorReport ?: 'None') + email_fields['commandLine'] = workflow.commandLine + email_fields['projectDir'] = workflow.projectDir + email_fields['summary'] = summary << misc_fields + + // On success try attach the multiqc report + def mqc_report = null + try { + if (workflow.success) { + mqc_report = multiqc_report.getVal() + if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { + if (mqc_report.size() > 1) { + log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one" + } + mqc_report = mqc_report[0] + } + } + } catch (all) { + if (multiqc_report) { + log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email" + } + } + + // Check if we are only sending emails on failure + def email_address = params.email + if (!params.email && params.email_on_fail && !workflow.success) { + email_address = params.email_on_fail + 
} + + // Render the TXT template + def engine = new groovy.text.GStringTemplateEngine() + def tf = new File("$projectDir/assets/email_template.txt") + def txt_template = engine.createTemplate(tf).make(email_fields) + def email_txt = txt_template.toString() + + // Render the HTML template + def hf = new File("$projectDir/assets/email_template.html") + def html_template = engine.createTemplate(hf).make(email_fields) + def email_html = html_template.toString() + + // Render the sendmail template + def max_multiqc_email_size = params.max_multiqc_email_size as nextflow.util.MemoryUnit + def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "$projectDir", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ] + def sf = new File("$projectDir/assets/sendmail_template.txt") + def sendmail_template = engine.createTemplate(sf).make(smail_fields) + def sendmail_html = sendmail_template.toString() + + // Send the HTML e-mail + Map colors = logColours(params.monochrome_logs) + if (email_address) { + try { + if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } + // Try to send HTML e-mail using sendmail + [ 'sendmail', '-t' ].execute() << sendmail_html + log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" + } catch (all) { + // Catch failures and try with plaintext + def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] + if ( mqc_report.size() <= max_multiqc_email_size.toBytes() ) { + mail_cmd += [ '-A', mqc_report ] + } + mail_cmd.execute() << email_html + log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-" + } + } + + // Write summary e-mail HTML to a file + def output_d = new File("${params.outdir}/pipeline_info/") + if (!output_d.exists()) { + output_d.mkdirs() + } + def output_hf = new File(output_d, 
"pipeline_report.html") + output_hf.withWriter { w -> w << email_html } + def output_tf = new File(output_d, "pipeline_report.txt") + output_tf.withWriter { w -> w << email_txt } + } + + // + // Print pipeline summary on completion + // + public static void summary(workflow, params, log) { + Map colors = logColours(params.monochrome_logs) + if (workflow.success) { + if (workflow.stats.ignoredCount == 0) { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" + } else { + log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + } + } else { + hostName(workflow, params, log) + log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" + } + } + + // + // ANSII Colours used for terminal logging + // + public static Map logColours(Boolean monochrome_logs) { + Map colorcodes = [:] + + // Reset / Meta + colorcodes['reset'] = monochrome_logs ? '' : "\033[0m" + colorcodes['bold'] = monochrome_logs ? '' : "\033[1m" + colorcodes['dim'] = monochrome_logs ? '' : "\033[2m" + colorcodes['underlined'] = monochrome_logs ? '' : "\033[4m" + colorcodes['blink'] = monochrome_logs ? '' : "\033[5m" + colorcodes['reverse'] = monochrome_logs ? '' : "\033[7m" + colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m" + + // Regular Colors + colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m" + colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m" + colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m" + colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m" + colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m" + colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m" + colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m" + colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m" + + // Bold + colorcodes['bblack'] = monochrome_logs ? 
'' : "\033[1;30m" + colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" + colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" + colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" + colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" + colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" + colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" + colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" + + // Underline + colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" + colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" + colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" + colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" + colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" + colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" + colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" + colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" + + // High Intensity + colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" + colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" + colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" + colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" + colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" + colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" + colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" + colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" + + // Bold High Intensity + colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" + colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" + colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" + colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" + colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" + colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" + colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" + colorcodes['biwhite'] = monochrome_logs ? 
'' : "\033[1;97m" + + return colorcodes + } + + // + // Does what is says on the tin + // + public static String dashedLine(monochrome_logs) { + Map colors = logColours(monochrome_logs) + return "-${colors.dim}----------------------------------------------------${colors.reset}-" + } + + // + // nf-core logo + // + public static String logo(workflow, monochrome_logs) { + Map colors = logColours(monochrome_logs) + String.format( + """\n + ${dashedLine(monochrome_logs)} + ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} + ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} + ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} + ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} + ${colors.green}`._,._,\'${colors.reset} + ${colors.purple} ${workflow.manifest.name} v${workflow.manifest.version}${colors.reset} + ${dashedLine(monochrome_logs)} + """.stripIndent() + ) + } +} diff --git a/lib/Utils.groovy b/lib/Utils.groovy new file mode 100755 index 00000000..18173e98 --- /dev/null +++ b/lib/Utils.groovy @@ -0,0 +1,47 @@ +// +// This file holds several Groovy functions that could be useful for any Nextflow pipeline +// + +import org.yaml.snakeyaml.Yaml + +class Utils { + + // + // When running with -profile conda, warn if channels have not been set-up appropriately + // + public static void checkCondaChannels(log) { + Yaml parser = new Yaml() + def channels = [] + try { + def config = parser.load("conda config --show channels".execute().text) + channels = config.channels + } catch(NullPointerException | IOException e) { + log.warn "Could not verify conda channel configuration." 
+ return + } + + // Check that all channels are present + def required_channels = ['conda-forge', 'bioconda', 'defaults'] + def conda_check_failed = !required_channels.every { ch -> ch in channels } + + // Check that they are in the right order + conda_check_failed |= !(channels.indexOf('conda-forge') < channels.indexOf('bioconda')) + conda_check_failed |= !(channels.indexOf('bioconda') < channels.indexOf('defaults')) + + if (conda_check_failed) { + log.warn "=============================================================================\n" + + " There is a problem with your Conda configuration!\n\n" + + " You will need to set-up the conda-forge and bioconda channels correctly.\n" + + " Please refer to https://bioconda.github.io/user/install.html#set-up-channels\n" + + " NB: The order of the channels matters!\n" + + "===================================================================================" + } + } + + // + // Join module args with appropriate spacing + // + public static String joinModuleArgs(args_list) { + return ' ' + args_list.join(' ') + } +} diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy new file mode 100755 index 00000000..f0bb877e --- /dev/null +++ b/lib/WorkflowMain.groovy @@ -0,0 +1,94 @@ +// +// This file holds several functions specific to the main.nf workflow in the nf-core/mhcquant pipeline +// + +class WorkflowMain { + + // + // Citation string for pipeline + // + public static String citation(workflow) { + return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + + // TODO nf-core: Add Zenodo DOI for pipeline after first release + //"* The pipeline\n" + + //" https://doi.org/10.5281/zenodo.XXXXXXX\n\n" + + "* The nf-core framework\n" + + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + + "* Software dependencies\n" + + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" + } + + // + // Print help to screen if required + // + public static String help(workflow, params, log) { + 
def command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" + def help_string = '' + help_string += NfcoreTemplate.logo(workflow, params.monochrome_logs) + help_string += NfcoreSchema.paramsHelp(workflow, params, command) + help_string += '\n' + citation(workflow) + '\n' + help_string += NfcoreTemplate.dashedLine(params.monochrome_logs) + return help_string + } + + // + // Print parameter summary log to screen + // + public static String paramsSummaryLog(workflow, params, log) { + def summary_log = '' + summary_log += NfcoreTemplate.logo(workflow, params.monochrome_logs) + summary_log += NfcoreSchema.paramsSummaryLog(workflow, params) + summary_log += '\n' + citation(workflow) + '\n' + summary_log += NfcoreTemplate.dashedLine(params.monochrome_logs) + return summary_log + } + + // + // Validate parameters and print summary to screen + // + public static void initialise(workflow, params, log) { + // Print help to screen if required + if (params.help) { + log.info help(workflow, params, log) + System.exit(0) + } + + // Validate workflow parameters via the JSON schema + if (params.validate_params) { + NfcoreSchema.validateParameters(workflow, params, log) + } + + // Print parameter summary log to screen + log.info paramsSummaryLog(workflow, params, log) + + // Check that conda channels are set-up correctly + if (params.enable_conda) { + Utils.checkCondaChannels(log) + } + + // Check AWS batch settings + NfcoreTemplate.awsBatch(workflow, params) + + // Check the hostnames against configured profiles + NfcoreTemplate.hostName(workflow, params, log) + + // Check input has been provided + if (!params.input) { + log.error "Please provide an input samplesheet to the pipeline e.g. '--input samplesheet.csv'" + System.exit(1) + } + } + + // + // Get attribute from genome config file e.g. 
fasta + // + public static String getGenomeAttribute(params, attribute) { + def val = '' + if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) { + if (params.genomes[ params.genome ].containsKey(attribute)) { + val = params.genomes[ params.genome ][ attribute ] + } + } + return val + } +} diff --git a/lib/WorkflowMhcquant.groovy b/lib/WorkflowMhcquant.groovy new file mode 100755 index 00000000..8dd14946 --- /dev/null +++ b/lib/WorkflowMhcquant.groovy @@ -0,0 +1,59 @@ +// +// This file holds several functions specific to the workflow/mhcquant.nf in the nf-core/mhcquant pipeline +// + +class WorkflowMhcquant { + + // + // Check and validate parameters + // + public static void initialise(params, log) { + genomeExistsError(params, log) + + if (!params.fasta) { + log.error "Genome fasta file not specified with e.g. '--fasta genome.fa' or via a detectable config file." + System.exit(1) + } + } + + // + // Get workflow summary for MultiQC + // + public static String paramsSummaryMultiqc(workflow, summary) { + String summary_section = '' + for (group in summary.keySet()) { + def group_params = summary.get(group) // This gets the parameters of that particular group + if (group_params) { + summary_section += "

$group

\n" + summary_section += "
\n" + } + } + + String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" + yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" + yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" + yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" + yaml_file_text += "plot_type: 'html'\n" + yaml_file_text += "data: |\n" + yaml_file_text += "${summary_section}" + return yaml_file_text + } + + // + // Exit pipeline if incorrect --genome key provided + // + private static void genomeExistsError(params, log) { + if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) { + log.error "=============================================================================\n" + + " Genome '${params.genome}' not found in any config files provided to the pipeline.\n" + + " Currently, the available genome keys are:\n" + + " ${params.genomes.keySet().join(", ")}\n" + + "===================================================================================" + System.exit(1) + } + } +} diff --git a/main.nf b/main.nf index 43705fde..1cff392d 100644 --- a/main.nf +++ b/main.nf @@ -1,390 +1,63 @@ #!/usr/bin/env nextflow /* ======================================================================================== - nf-core/mhcquant + nf-core/mhcquant ======================================================================================== - nf-core/mhcquant Analysis Pipeline. 
- #### Homepage / Documentation - https://github.com/nf-core/mhcquant + Github : https://github.com/nf-core/mhcquant + Website: https://nf-co.re/mhcquant + Slack : https://nfcore.slack.com/channels/mhcquant ---------------------------------------------------------------------------------------- */ -log.info Headers.nf_core(workflow, params.monochrome_logs) - -//////////////////////////////////////////////////// -/* -- PRINT HELP -- */ -////////////////////////////////////////////////////+ -def json_schema = "$projectDir/nextflow_schema.json" -if (params.help) { - def command = "nextflow run nf-core/mhcquant --input '*_R{1,2}.fastq.gz' -profile docker" - log.info NfcoreSchema.params_help(workflow, params, json_schema, command) - exit 0 -} - -//////////////////////////////////////////////////// -/* -- VALIDATE PARAMETERS -- */ -////////////////////////////////////////////////////+ -if (params.validate_params) { - NfcoreSchema.validateParameters(params, json_schema, log) -} - -//////////////////////////////////////////////////// -/* -- Collect configuration parameters -- */ -//////////////////////////////////////////////////// - -// Check if genome exists in the config file -if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) { - exit 1, "The provided genome '${params.genome}' is not available in the iGenomes file. Currently the available genomes are ${params.genomes.keySet().join(', ')}" -} - -// TODO nf-core: Add any reference files that are needed -// Configurable reference genomes -// -// NOTE - THIS IS NOT USED IN THIS PIPELINE, EXAMPLE ONLY -// If you want to use the channel below in a process, define the following: -// input: -// file fasta from ch_fasta -// -params.fasta = params.genome ? 
params.genomes[ params.genome ].fasta ?: false : false -if (params.fasta) { ch_fasta = file(params.fasta, checkIfExists: true) } - -// Check AWS batch settings -if (workflow.profile.contains('awsbatch')) { - // AWSBatch sanity checking - if (!params.awsqueue || !params.awsregion) exit 1, 'Specify correct --awsqueue and --awsregion parameters on AWSBatch!' - // Check outdir paths to be S3 buckets if running on AWSBatch - // related: https://github.com/nextflow-io/nextflow/issues/813 - if (!params.outdir.startsWith('s3:')) exit 1, 'Outdir not on S3 - specify S3 Bucket to run on AWSBatch!' - // Prevent trace files to be stored on S3 since S3 does not support rolling files. - if (params.tracedir.startsWith('s3:')) exit 1, 'Specify a local tracedir or run without trace! S3 cannot be used for tracefiles.' -} - -// Stage config files -ch_multiqc_config = file("$projectDir/assets/multiqc_config.yaml", checkIfExists: true) -ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multiqc_config, checkIfExists: true) : Channel.empty() -ch_output_docs = file("$projectDir/docs/output.md", checkIfExists: true) -ch_output_docs_images = file("$projectDir/docs/images/", checkIfExists: true) +nextflow.enable.dsl = 2 /* - * Create a channel for input read files - */ -if (params.input_paths) { - if (params.single_end) { - Channel - .from(params.input_paths) - .map { row -> [ row[0], [ file(row[1][0], checkIfExists: true) ] ] } - .ifEmpty { exit 1, 'params.input_paths was empty - no input files supplied' } - .into { ch_read_files_fastqc; ch_read_files_trimming } - } else { - Channel - .from(params.input_paths) - .map { row -> [ row[0], [ file(row[1][0], checkIfExists: true), file(row[1][1], checkIfExists: true) ] ] } - .ifEmpty { exit 1, 'params.input_paths was empty - no input files supplied' } - .into { ch_read_files_fastqc; ch_read_files_trimming } - } -} else { - Channel - .fromFilePairs(params.input, size: params.single_end ? 
1 : 2) - .ifEmpty { exit 1, "Cannot find any reads matching: ${params.input}\nNB: Path needs to be enclosed in quotes!\nIf this is single-end data, please specify --single_end on the command line." } - .into { ch_read_files_fastqc; ch_read_files_trimming } -} - -//////////////////////////////////////////////////// -/* -- PRINT PARAMETER SUMMARY -- */ -//////////////////////////////////////////////////// -log.info NfcoreSchema.params_summary_log(workflow, params, json_schema) - -// Header log info -def summary = [:] -if (workflow.revision) summary['Pipeline Release'] = workflow.revision -summary['Run Name'] = workflow.runName -// TODO nf-core: Report custom parameters here -summary['Input'] = params.input -summary['Fasta Ref'] = params.fasta -summary['Data Type'] = params.single_end ? 'Single-End' : 'Paired-End' -summary['Max Resources'] = "$params.max_memory memory, $params.max_cpus cpus, $params.max_time time per job" -if (workflow.containerEngine) summary['Container'] = "$workflow.containerEngine - $workflow.container" -summary['Output dir'] = params.outdir -summary['Launch dir'] = workflow.launchDir -summary['Working dir'] = workflow.workDir -summary['Script dir'] = workflow.projectDir -summary['User'] = workflow.userName -if (workflow.profile.contains('awsbatch')) { - summary['AWS Region'] = params.awsregion - summary['AWS Queue'] = params.awsqueue - summary['AWS CLI'] = params.awscli -} -summary['Config Profile'] = workflow.profile -if (params.config_profile_description) summary['Config Profile Description'] = params.config_profile_description -if (params.config_profile_contact) summary['Config Profile Contact'] = params.config_profile_contact -if (params.config_profile_url) summary['Config Profile URL'] = params.config_profile_url -summary['Config Files'] = workflow.configFiles.join(', ') -if (params.email || params.email_on_fail) { - summary['E-mail Address'] = params.email - summary['E-mail on failure'] = params.email_on_fail - summary['MultiQC maxsize'] = 
params.max_multiqc_email_size -} - -// Check the hostnames against configured profiles -checkHostname() - -Channel.from(summary.collect{ [it.key, it.value] }) - .map { k,v -> "
$k
${v ?: 'N/A'}
" } - .reduce { a, b -> return [a, b].join("\n ") } - .map { x -> """ - id: 'nf-core-mhcquant-summary' - description: " - this information is collected when the pipeline is started." - section_name: 'nf-core/mhcquant Workflow Summary' - section_href: 'https://github.com/nf-core/mhcquant' - plot_type: 'html' - data: | -
- $x -
- """.stripIndent() } - .set { ch_workflow_summary } - -/* - * Parse software version numbers - */ -process get_software_versions { - publishDir "${params.outdir}/pipeline_info", mode: params.publish_dir_mode, - saveAs: { filename -> - if (filename.indexOf('.csv') > 0) filename - else null - } - - output: - file 'software_versions_mqc.yaml' into ch_software_versions_yaml - file 'software_versions.csv' +======================================================================================== + GENOME PARAMETER VALUES +======================================================================================== +*/ - script: - // TODO nf-core: Get all tools to print their version number here - """ - echo $workflow.manifest.version > v_pipeline.txt - echo $workflow.nextflow.version > v_nextflow.txt - fastqc --version > v_fastqc.txt - multiqc --version > v_multiqc.txt - scrape_software_versions.py &> software_versions_mqc.yaml - """ -} +params.fasta = WorkflowMain.getGenomeAttribute(params, 'fasta') /* - * STEP 1 - FastQC - */ -process fastqc { - tag "$name" - label 'process_medium' - publishDir "${params.outdir}/fastqc", mode: params.publish_dir_mode, - saveAs: { filename -> - filename.indexOf('.zip') > 0 ? 
"zips/$filename" : "$filename" - } - - input: - set val(name), file(reads) from ch_read_files_fastqc - - output: - file '*_fastqc.{zip,html}' into ch_fastqc_results +======================================================================================== + VALIDATE & PRINT PARAMETER SUMMARY +======================================================================================== +*/ - script: - """ - fastqc --quiet --threads $task.cpus $reads - """ -} +WorkflowMain.initialise(workflow, params, log) /* - * STEP 2 - MultiQC - */ -process multiqc { - publishDir "${params.outdir}/MultiQC", mode: params.publish_dir_mode - - input: - file (multiqc_config) from ch_multiqc_config - file (mqc_custom_config) from ch_multiqc_custom_config.collect().ifEmpty([]) - // TODO nf-core: Add in log files from your new processes for MultiQC to find! - file ('fastqc/*') from ch_fastqc_results.collect().ifEmpty([]) - file ('software_versions/*') from ch_software_versions_yaml.collect() - file workflow_summary from ch_workflow_summary.collectFile(name: "workflow_summary_mqc.yaml") +======================================================================================== + NAMED WORKFLOW FOR PIPELINE +======================================================================================== +*/ - output: - file "*multiqc_report.html" into ch_multiqc_report - file "*_data" - file "multiqc_plots" +include { MHCQUANT } from './workflows/mhcquant' - script: - rtitle = '' - rfilename = '' - if (!(workflow.runName ==~ /[a-z]+_[a-z]+/)) { - rtitle = "--title \"${workflow.runName}\"" - rfilename = "--filename " + workflow.runName.replaceAll('\\W','_').replaceAll('_+','_') + "_multiqc_report" - } - custom_config_file = params.multiqc_config ? "--config $mqc_custom_config" : '' - // TODO nf-core: Specify which MultiQC modules to use with -m for a faster run time - """ - multiqc -f $rtitle $rfilename $custom_config_file . 
- """ +// +// WORKFLOW: Run main nf-core/mhcquant analysis pipeline +// +workflow NFCORE_MHCQUANT { + MHCQUANT () } /* - * STEP 3 - Output Description HTML - */ -process output_documentation { - publishDir "${params.outdir}/pipeline_info", mode: params.publish_dir_mode - - input: - file output_docs from ch_output_docs - file images from ch_output_docs_images - - output: - file 'results_description.html' +======================================================================================== + RUN ALL WORKFLOWS +======================================================================================== +*/ - script: - """ - markdown_to_html.py $output_docs -o results_description.html - """ +// +// WORKFLOW: Execute a single named workflow for the pipeline +// See: https://github.com/nf-core/rnaseq/issues/619 +// +workflow { + NFCORE_MHCQUANT () } /* - * Completion e-mail notification - */ -workflow.onComplete { - - // Set up the e-mail variables - def subject = "[nf-core/mhcquant] Successful: $workflow.runName" - if (!workflow.success) { - subject = "[nf-core/mhcquant] FAILED: $workflow.runName" - } - def email_fields = [:] - email_fields['version'] = workflow.manifest.version - email_fields['runName'] = workflow.runName - email_fields['success'] = workflow.success - email_fields['dateComplete'] = workflow.complete - email_fields['duration'] = workflow.duration - email_fields['exitStatus'] = workflow.exitStatus - email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - email_fields['errorReport'] = (workflow.errorReport ?: 'None') - email_fields['commandLine'] = workflow.commandLine - email_fields['projectDir'] = workflow.projectDir - email_fields['summary'] = summary - email_fields['summary']['Date Started'] = workflow.start - email_fields['summary']['Date Completed'] = workflow.complete - email_fields['summary']['Pipeline script file path'] = workflow.scriptFile - email_fields['summary']['Pipeline script hash ID'] = workflow.scriptId - if 
(workflow.repository) email_fields['summary']['Pipeline repository Git URL'] = workflow.repository - if (workflow.commitId) email_fields['summary']['Pipeline repository Git Commit'] = workflow.commitId - if (workflow.revision) email_fields['summary']['Pipeline Git branch/tag'] = workflow.revision - email_fields['summary']['Nextflow Version'] = workflow.nextflow.version - email_fields['summary']['Nextflow Build'] = workflow.nextflow.build - email_fields['summary']['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp - - // TODO nf-core: If not using MultiQC, strip out this code (including params.max_multiqc_email_size) - // On success try attach the multiqc report - def mqc_report = null - try { - if (workflow.success) { - mqc_report = ch_multiqc_report.getVal() - if (mqc_report.getClass() == ArrayList) { - log.warn "[nf-core/mhcquant] Found multiple reports from process 'multiqc', will use only one" - mqc_report = mqc_report[0] - } - } - } catch (all) { - log.warn "[nf-core/mhcquant] Could not attach MultiQC report to summary email" - } - - // Check if we are only sending emails on failure - email_address = params.email - if (!params.email && params.email_on_fail && !workflow.success) { - email_address = params.email_on_fail - } - - // Render the TXT template - def engine = new groovy.text.GStringTemplateEngine() - def tf = new File("$projectDir/assets/email_template.txt") - def txt_template = engine.createTemplate(tf).make(email_fields) - def email_txt = txt_template.toString() - - // Render the HTML template - def hf = new File("$projectDir/assets/email_template.html") - def html_template = engine.createTemplate(hf).make(email_fields) - def email_html = html_template.toString() - - // Render the sendmail template - def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "$projectDir", mqcFile: mqc_report, mqcMaxSize: params.max_multiqc_email_size.toBytes() ] - def sf = new 
File("$projectDir/assets/sendmail_template.txt") - def sendmail_template = engine.createTemplate(sf).make(smail_fields) - def sendmail_html = sendmail_template.toString() - - // Send the HTML e-mail - if (email_address) { - try { - if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') } - // Try to send HTML e-mail using sendmail - [ 'sendmail', '-t' ].execute() << sendmail_html - log.info "[nf-core/mhcquant] Sent summary e-mail to $email_address (sendmail)" - } catch (all) { - // Catch failures and try with plaintext - def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] - if ( mqc_report.size() <= params.max_multiqc_email_size.toBytes() ) { - mail_cmd += [ '-A', mqc_report ] - } - mail_cmd.execute() << email_html - log.info "[nf-core/mhcquant] Sent summary e-mail to $email_address (mail)" - } - } - - // Write summary e-mail HTML to a file - def output_d = new File("${params.outdir}/pipeline_info/") - if (!output_d.exists()) { - output_d.mkdirs() - } - def output_hf = new File(output_d, "pipeline_report.html") - output_hf.withWriter { w -> w << email_html } - def output_tf = new File(output_d, "pipeline_report.txt") - output_tf.withWriter { w -> w << email_txt } - - c_green = params.monochrome_logs ? '' : "\033[0;32m"; - c_purple = params.monochrome_logs ? '' : "\033[0;35m"; - c_red = params.monochrome_logs ? '' : "\033[0;31m"; - c_reset = params.monochrome_logs ? 
'' : "\033[0m"; - - if (workflow.stats.ignoredCount > 0 && workflow.success) { - log.info "-${c_purple}Warning, pipeline completed, but with errored process(es) ${c_reset}-" - log.info "-${c_red}Number of ignored errored process(es) : ${workflow.stats.ignoredCount} ${c_reset}-" - log.info "-${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCount} ${c_reset}-" - } - - if (workflow.success) { - log.info "-${c_purple}[nf-core/mhcquant]${c_green} Pipeline completed successfully${c_reset}-" - } else { - checkHostname() - log.info "-${c_purple}[nf-core/mhcquant]${c_red} Pipeline completed with errors${c_reset}-" - } - -} - -workflow.onError { - // Print unexpected parameters - easiest is to just rerun validation - NfcoreSchema.validateParameters(params, json_schema, log) -} - -def checkHostname() { - def c_reset = params.monochrome_logs ? '' : "\033[0m" - def c_white = params.monochrome_logs ? '' : "\033[0;37m" - def c_red = params.monochrome_logs ? '' : "\033[1;91m" - def c_yellow_bold = params.monochrome_logs ? 
'' : "\033[1;93m" - if (params.hostnames) { - def hostname = 'hostname'.execute().text.trim() - params.hostnames.each { prof, hnames -> - hnames.each { hname -> - if (hostname.contains(hname) && !workflow.profile.contains(prof)) { - log.error "${c_red}====================================================${c_reset}\n" + - " ${c_red}WARNING!${c_reset} You are running with `-profile $workflow.profile`\n" + - " but your machine hostname is ${c_white}'$hostname'${c_reset}\n" + - " ${c_yellow_bold}It's highly recommended that you use `-profile $prof${c_reset}`\n" + - "${c_red}====================================================${c_reset}\n" - } - } - } - } -} +======================================================================================== + THE END +======================================================================================== +*/ diff --git a/modules.json b/modules.json new file mode 100644 index 00000000..fe99308c --- /dev/null +++ b/modules.json @@ -0,0 +1,14 @@ +{ + "name": "nf-core/mhcquant", + "homePage": "https://github.com/nf-core/mhcquant", + "repos": { + "nf-core/modules": { + "fastqc": { + "git_sha": "e937c7950af70930d1f34bb961403d9d2aa81c7d" + }, + "multiqc": { + "git_sha": "e937c7950af70930d1f34bb961403d9d2aa81c7d" + } + } + } +} diff --git a/modules/local/functions.nf b/modules/local/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/local/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + 
options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/local/get_software_versions.nf b/modules/local/get_software_versions.nf new file mode 100644 index 00000000..01220c97 --- /dev/null +++ b/modules/local/get_software_versions.nf @@ -0,0 +1,33 @@ +// Import generic module functions +include { saveFiles } from './functions' + +params.options = [:] + +process GET_SOFTWARE_VERSIONS { + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'pipeline_info', meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? "conda-forge::python=3.8.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/python:3.8.3" + } else { + container "quay.io/biocontainers/python:3.8.3" + } + + cache false + + input: + path versions + + output: + path "software_versions.tsv" , emit: tsv + path 'software_versions_mqc.yaml', emit: yaml + + script: // This script is bundled with the pipeline, in nf-core/mhcquant/bin/ + """ + echo $workflow.manifest.version > pipeline.version.txt + echo $workflow.nextflow.version > nextflow.version.txt + scrape_software_versions.py &> software_versions_mqc.yaml + """ +} diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf new file mode 100644 index 00000000..b2e6ad62 --- /dev/null +++ b/modules/local/samplesheet_check.nf @@ -0,0 +1,31 @@ +// Import generic module functions +include { saveFiles } from './functions' + +params.options = [:] + 
+process SAMPLESHEET_CHECK { + tag "$samplesheet" + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'pipeline_info', meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? "conda-forge::python=3.8.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/python:3.8.3" + } else { + container "quay.io/biocontainers/python:3.8.3" + } + + input: + path samplesheet + + output: + path '*.csv' + + script: // This script is bundled with the pipeline, in nf-core/mhcquant/bin/ + """ + check_samplesheet.py \\ + $samplesheet \\ + samplesheet.valid.csv + """ +} diff --git a/modules/nf-core/modules/fastqc/functions.nf b/modules/nf-core/modules/fastqc/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/nf-core/modules/fastqc/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/nf-core/modules/fastqc/main.nf b/modules/nf-core/modules/fastqc/main.nf new file mode 100644 index 00000000..39c327b2 --- /dev/null +++ b/modules/nf-core/modules/fastqc/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process FASTQC { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::fastqc=0.11.9" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0" + } else { + container "quay.io/biocontainers/fastqc:0.11.9--0" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("*.html"), emit: html + tuple val(meta), path("*.zip") , emit: zip + path "*.version.txt" , emit: version + + script: + // Add soft-links to original FastQs for consistent naming in pipeline + def software = getSoftwareName(task.process) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + if (meta.single_end) { + """ + [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz + fastqc $options.args --threads $task.cpus ${prefix}.fastq.gz + fastqc --version | sed -e "s/FastQC v//g" > ${software}.version.txt + """ + } else { + """ + [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz + [ ! -f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz + fastqc $options.args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz + fastqc --version | sed -e "s/FastQC v//g" > ${software}.version.txt + """ + } +} diff --git a/modules/nf-core/modules/fastqc/meta.yml b/modules/nf-core/modules/fastqc/meta.yml new file mode 100644 index 00000000..8eb9953d --- /dev/null +++ b/modules/nf-core/modules/fastqc/meta.yml @@ -0,0 +1,51 @@ +name: fastqc +description: Run FastQC on sequenced reads +keywords: + - quality control + - qc + - adapters + - fastq +tools: + - fastqc: + description: | + FastQC gives general quality metrics about your reads. + It provides information about the quality score distribution + across your reads, the per base sequence content (%A/C/G/T). + You get information about adapter contamination and other + overrepresented sequences. 
+ homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ + documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - html: + type: file + description: FastQC report + pattern: "*_{fastqc.html}" + - zip: + type: file + description: FastQC report archive + pattern: "*_{fastqc.zip}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@drpatelh" + - "@grst" + - "@ewels" + - "@FelixKrueger" diff --git a/modules/nf-core/modules/multiqc/functions.nf b/modules/nf-core/modules/multiqc/functions.nf new file mode 100644 index 00000000..da9da093 --- /dev/null +++ b/modules/nf-core/modules/multiqc/functions.nf @@ -0,0 +1,68 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = 
path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + if (!args.filename.endsWith('.version.txt')) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } + } +} diff --git a/modules/nf-core/modules/multiqc/main.nf b/modules/nf-core/modules/multiqc/main.nf new file mode 100644 index 00000000..da780800 --- /dev/null +++ b/modules/nf-core/modules/multiqc/main.nf @@ -0,0 +1,35 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MULTIQC { + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + + conda (params.enable_conda ? 
"bioconda::multiqc=1.10.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/multiqc:1.10.1--py_0" + } else { + container "quay.io/biocontainers/multiqc:1.10.1--py_0" + } + + input: + path multiqc_files + + output: + path "*multiqc_report.html", emit: report + path "*_data" , emit: data + path "*_plots" , optional:true, emit: plots + path "*.version.txt" , emit: version + + script: + def software = getSoftwareName(task.process) + """ + multiqc -f $options.args . + multiqc --version | sed -e "s/multiqc, version //g" > ${software}.version.txt + """ +} diff --git a/modules/nf-core/modules/multiqc/meta.yml b/modules/nf-core/modules/multiqc/meta.yml new file mode 100644 index 00000000..532a8bb1 --- /dev/null +++ b/modules/nf-core/modules/multiqc/meta.yml @@ -0,0 +1,39 @@ +name: MultiQC +description: Aggregate results from bioinformatics analyses across many samples into a single report +keywords: + - QC + - bioinformatics tools + - Beautiful stand-alone HTML report +tools: + - multiqc: + description: | + MultiQC searches a given directory for analysis logs and compiles a HTML report. + It's a general use tool, perfect for summarising the output from numerous bioinformatics tools. 
+ homepage: https://multiqc.info/ + documentation: https://multiqc.info/docs/ +input: + - multiqc_files: + type: file + description: | + List of reports / files recognised by MultiQC, for example the html and zip output of FastQC +output: + - report: + type: file + description: MultiQC report file + pattern: "multiqc_report.html" + - data: + type: dir + description: MultiQC data dir + pattern: "multiqc_data" + - plots: + type: file + description: Plots created by MultiQC + pattern: "*_data" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@abhi18av" + - "@bunop" + - "@drpatelh" diff --git a/nextflow.config b/nextflow.config index a27c4e4d..f99f3b9d 100644 --- a/nextflow.config +++ b/nextflow.config @@ -1,130 +1,136 @@ /* - * ------------------------------------------------- - * nf-core/mhcquant Nextflow config file - * ------------------------------------------------- - * Default config options for all environments. - */ +======================================================================================== + nf-core/mhcquant Nextflow config file +======================================================================================== + Default config options for all compute environments +---------------------------------------------------------------------------------------- +*/ // Global default params, used in configs params { - // Workflow flags - // TODO nf-core: Specify your pipeline's command line flags - genome = false - input = null - input_paths = null - single_end = false - outdir = './results' - publish_dir_mode = 'copy' - - // Boilerplate options - multiqc_config = false - email = false - email_on_fail = false - max_multiqc_email_size = 25.MB - plaintext_email = false - monochrome_logs = false - help = false - igenomes_base = 's3://ngi-igenomes/igenomes' - tracedir = "${params.outdir}/pipeline_info" - igenomes_ignore = false - custom_config_version = 'master' - custom_config_base = 
"https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" - hostnames = false - config_profile_name = null - config_profile_description = false - config_profile_contact = false - config_profile_url = false - validate_params = true - show_hidden_params = false - schema_ignore_params = 'genomes,input_paths' - - // Defaults only, expecting to be overwritten - max_memory = 128.GB - max_cpus = 16 - max_time = 240.h + // TODO nf-core: Specify your pipeline's command line flags + // Input options + input = null -} + // References + genome = null + igenomes_base = 's3://ngi-igenomes/igenomes' + igenomes_ignore = false + + // MultiQC options + multiqc_config = null + multiqc_title = null + max_multiqc_email_size = '25.MB' + + // Boilerplate options + outdir = './results' + tracedir = "${params.outdir}/pipeline_info" + publish_dir_mode = 'copy' + email = null + email_on_fail = null + plaintext_email = false + monochrome_logs = false + help = false + validate_params = true + show_hidden_params = false + schema_ignore_params = 'genomes,modules' + enable_conda = false + singularity_pull_docker_container = false -// Container slug. Stable releases should specify release tag! 
-// Developmental code should specify :dev -process.container = 'nfcore/mhcquant:dev' + // Config options + custom_config_version = 'master' + custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" + hostnames = [:] + config_profile_description = null + config_profile_contact = null + config_profile_url = null + config_profile_name = null + + // Max resource options + // Defaults only, expecting to be overwritten + max_memory = '128.GB' + max_cpus = 16 + max_time = '240.h' + +} // Load base.config by default for all pipelines includeConfig 'conf/base.config' +// Load modules.config for DSL2 module specific options +includeConfig 'conf/modules.config' + // Load nf-core custom profiles from different Institutions try { - includeConfig "${params.custom_config_base}/nfcore_custom.config" + includeConfig "${params.custom_config_base}/nfcore_custom.config" } catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") -} - -profiles { - conda { - docker.enabled = false - singularity.enabled = false - podman.enabled = false - shifter.enabled = false - charliecloud.enabled = false - process.conda = "$projectDir/environment.yml" - } - debug { process.beforeScript = 'echo $HOSTNAME' } - docker { - docker.enabled = true - singularity.enabled = false - podman.enabled = false - shifter.enabled = false - charliecloud.enabled = false - // Avoid this error: - // WARNING: Your kernel does not support swap limit capabilities or the cgroup is not mounted. Memory limited without swap. - // Testing this in nf-core after discussion here https://github.com/nf-core/tools/pull/351 - // once this is established and works well, nextflow might implement this behavior as new default. 
- docker.runOptions = '-u \$(id -u):\$(id -g)' - } - singularity { - docker.enabled = false - singularity.enabled = true - podman.enabled = false - shifter.enabled = false - charliecloud.enabled = false - singularity.autoMounts = true - } - podman { - singularity.enabled = false - docker.enabled = false - podman.enabled = true - shifter.enabled = false - charliecloud.enabled = false - } - shifter { - singularity.enabled = false - docker.enabled = false - podman.enabled = false - shifter.enabled = true - charliecloud.enabled = false - } - charliecloud { - singularity.enabled = false - docker.enabled = false - podman.enabled = false - shifter.enabled = false - charliecloud.enabled = true - } - test { includeConfig 'conf/test.config' } - test_full { includeConfig 'conf/test_full.config' } + System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") } // Load igenomes.config if required if (!params.igenomes_ignore) { - includeConfig 'conf/igenomes.config' + includeConfig 'conf/igenomes.config' +} else { + params.genomes = [:] +} + +profiles { + debug { process.beforeScript = 'echo $HOSTNAME' } + conda { + params.enable_conda = true + docker.enabled = false + singularity.enabled = false + podman.enabled = false + shifter.enabled = false + charliecloud.enabled = false + } + docker { + docker.enabled = true + docker.userEmulation = true + singularity.enabled = false + podman.enabled = false + shifter.enabled = false + charliecloud.enabled = false + } + singularity { + singularity.enabled = true + singularity.autoMounts = true + docker.enabled = false + podman.enabled = false + shifter.enabled = false + charliecloud.enabled = false + } + podman { + podman.enabled = true + docker.enabled = false + singularity.enabled = false + shifter.enabled = false + charliecloud.enabled = false + } + shifter { + shifter.enabled = true + docker.enabled = false + singularity.enabled = false + podman.enabled = false + 
charliecloud.enabled = false + } + charliecloud { + charliecloud.enabled = true + docker.enabled = false + singularity.enabled = false + podman.enabled = false + shifter.enabled = false + } + test { includeConfig 'conf/test.config' } + test_full { includeConfig 'conf/test_full.config' } } // Export these variables to prevent local Python/R libraries from conflicting with those in the container env { - PYTHONNOUSERSITE = 1 - R_PROFILE_USER = "/.Rprofile" - R_ENVIRON_USER = "/.Renviron" + PYTHONNOUSERSITE = 1 + R_PROFILE_USER = "/.Rprofile" + R_ENVIRON_USER = "/.Renviron" } // Capture exit codes from upstream processes when piping @@ -132,61 +138,61 @@ process.shell = ['/bin/bash', '-euo', 'pipefail'] def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') timeline { - enabled = true - file = "${params.tracedir}/execution_timeline_${trace_timestamp}.html" + enabled = true + file = "${params.tracedir}/execution_timeline_${trace_timestamp}.html" } report { - enabled = true - file = "${params.tracedir}/execution_report_${trace_timestamp}.html" + enabled = true + file = "${params.tracedir}/execution_report_${trace_timestamp}.html" } trace { - enabled = true - file = "${params.tracedir}/execution_trace_${trace_timestamp}.txt" + enabled = true + file = "${params.tracedir}/execution_trace_${trace_timestamp}.txt" } dag { - enabled = true - file = "${params.tracedir}/pipeline_dag_${trace_timestamp}.svg" + enabled = true + file = "${params.tracedir}/pipeline_dag_${trace_timestamp}.svg" } manifest { - name = 'nf-core/mhcquant' - author = 'Leon Bichmann' - homePage = 'https://github.com/nf-core/mhcquant' - description = 'Identify and quantify peptides from mass spectrometry raw data' - mainScript = 'main.nf' - nextflowVersion = '>=20.04.0' - version = '1.6.1' + name = 'nf-core/mhcquant' + author = 'Leon Bichmann' + homePage = 'https://github.com/nf-core/mhcquant' + description = 'Identify and quantify peptides from mass spectrometry raw data' + mainScript = 
'main.nf' + nextflowVersion = '!>=21.04.0' + version = '1.6.1' } // Function to ensure that resource requirements don't go beyond // a maximum limit def check_max(obj, type) { - if (type == 'memory') { - try { - if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) - return params.max_memory as nextflow.util.MemoryUnit - else - return obj - } catch (all) { - println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'time') { - try { - if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) - return params.max_time as nextflow.util.Duration - else - return obj - } catch (all) { - println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'cpus') { - try { - return Math.min( obj, params.max_cpus as int ) - } catch (all) { - println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj" - return obj + if (type == 'memory') { + try { + if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) + return params.max_memory as nextflow.util.MemoryUnit + else + return obj + } catch (all) { + println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" + return obj + } + } else if (type == 'time') { + try { + if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) + return params.max_time as nextflow.util.Duration + else + return obj + } catch (all) { + println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj" + return obj + } + } else if (type == 'cpus') { + try { + return Math.min( obj, params.max_cpus as int ) + } catch (all) { + println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! 
Using default value: $obj" + return obj + } } - } } diff --git a/nextflow_schema.json b/nextflow_schema.json index 16572fdc..f997fee6 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -16,19 +16,17 @@ "properties": { "input": { "type": "string", - "fa_icon": "fas fa-dna", - "description": "Input FastQ files.", - "help_text": "Use this to specify the location of your input FastQ files. For example:\n\n```bash\n--input 'path/to/data/sample_*_{1,2}.fastq'\n```\n\nPlease note the following requirements:\n\n1. The path must be enclosed in quotes\n2. The path must have at least one `*` wildcard character\n3. When using the pipeline with paired end data, the path must use `{1,2}` notation to specify read pairs.\n\nIf left unspecified, a default pattern is used: `data/*{1,2}.fastq.gz`" - }, - "single_end": { - "type": "boolean", - "description": "Specifies that the input is single-end reads.", - "fa_icon": "fas fa-align-center", - "help_text": "By default, the pipeline expects paired-end data. If you have single-end data, you need to specify `--single_end` on the command line when you launch the pipeline. A normal glob pattern, enclosed in quotation marks, can then be used for `--input`. For example:\n\n```bash\n--single_end --input '*.fastq'\n```\n\nIt is not possible to run a mixture of single-end and paired-end files in one run." + "format": "file-path", + "mimetype": "text/csv", + "pattern": "\\.csv$", + "schema": "assets/schema_input.json", + "description": "Path to comma-separated file containing information about the samples in the experiment.", + "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row. 
See [usage docs](https://nf-co.re/mhcquant/usage#samplesheet-input).", + "fa_icon": "fas fa-file-csv" }, "outdir": { "type": "string", - "description": "The output directory where the results will be saved.", + "description": "Path to the output directory where the results will be saved.", "default": "./results", "fa_icon": "fas fa-folder-open" }, @@ -38,6 +36,11 @@ "fa_icon": "fas fa-envelope", "help_text": "Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run.", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$" + }, + "multiqc_title": { + "type": "string", + "description": "MultiQC report title. Printed as page header, used for filename if not otherwise specified.", + "fa_icon": "fas fa-file-signature" } } }, @@ -45,22 +48,26 @@ "title": "Reference genome options", "type": "object", "fa_icon": "fas fa-dna", - "description": "Options for the reference genome indices used to align reads.", + "description": "Reference genome related files and options required for the workflow.", "properties": { "genome": { "type": "string", "description": "Name of iGenomes reference.", "fa_icon": "fas fa-book", - "help_text": "If using a reference genome configured in the pipeline using iGenomes, use this parameter to give the ID for the reference. This is then used to build the full paths for all required reference genome files e.g. `--genome GRCh38`.\n\nSee the [nf-core website docs](https://nf-co.re/usage/reference_genomes) for more details." + "help_text": "If using a reference genome configured in the pipeline using iGenomes, use this parameter to give the ID for the reference. This is then used to build the full paths for all required reference genome files e.g. `--genome GRCh38`. 
\n\nSee the [nf-core website docs](https://nf-co.re/usage/reference_genomes) for more details." }, "fasta": { "type": "string", - "fa_icon": "fas fa-font", + "format": "file-path", + "mimetype": "text/plain", + "pattern": "\\.fn?a(sta)?(\\.gz)?$", "description": "Path to FASTA genome file.", - "help_text": "If you have no genome reference available, the pipeline can build one using a FASTA file. This requires additional time and resources, so it's better to use a pre-build index if possible." + "help_text": "This parameter is *mandatory* if `--genome` is not specified. If you don't have a BWA index available this will be generated for you automatically. Combine with `--save_reference` to save BWA index for future runs.", + "fa_icon": "far fa-file-code" }, "igenomes_base": { "type": "string", + "format": "directory-path", "description": "Directory / URL base for iGenomes references.", "default": "s3://ngi-igenomes/igenomes", "fa_icon": "fas fa-cloud-download-alt", @@ -75,91 +82,57 @@ } } }, - "generic_options": { - "title": "Generic options", + "institutional_config_options": { + "title": "Institutional config options", "type": "object", - "fa_icon": "fas fa-file-import", - "description": "Less common options for the pipeline, typically set in a config file.", - "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", + "fa_icon": "fas fa-university", + "description": "Parameters used to describe centralised config profiles. These should not be edited.", + "help_text": "The centralised nf-core configuration profiles use a handful of pipeline parameters to describe themselves. This information is then printed to the Nextflow log when you run a pipeline. 
You should not need to change these values when you run a pipeline.", "properties": { - "help": { - "type": "boolean", - "description": "Display help text.", - "hidden": true, - "fa_icon": "fas fa-question-circle" - }, - "publish_dir_mode": { + "custom_config_version": { "type": "string", - "default": "copy", + "description": "Git commit id for Institutional configs.", + "default": "master", "hidden": true, - "description": "Method used to save pipeline results to output directory.", - "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", - "fa_icon": "fas fa-copy", - "enum": [ - "symlink", - "rellink", - "link", - "copy", - "copyNoFollow", - "move" - ] - }, - "validate_params": { - "type": "boolean", - "description": "Boolean whether to validate parameters against the schema at runtime", - "default": true, - "fa_icon": "fas fa-check-square", - "hidden": true + "fa_icon": "fas fa-users-cog" }, - "email_on_fail": { + "custom_config_base": { "type": "string", - "description": "Email address for completion summary, only when pipeline fails.", - "fa_icon": "fas fa-exclamation-triangle", - "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$", + "description": "Base directory for Institutional configs.", + "default": "https://raw.githubusercontent.com/nf-core/configs/master", "hidden": true, - "help_text": "This works exactly as with `--email`, except emails are only sent if the workflow is not successful." + "help_text": "If you're running offline, Nextflow will not be able to fetch the institutional config files from the internet. If you don't need them, then this is not a problem. 
If you do need them, you should download the files from the repo and tell Nextflow where to find them with this parameter.", + "fa_icon": "fas fa-users-cog" }, - "plaintext_email": { - "type": "boolean", - "description": "Send plain-text email instead of HTML.", - "fa_icon": "fas fa-remove-format", + "hostnames": { + "type": "string", + "description": "Institutional configs hostname.", "hidden": true, - "help_text": "Set to receive plain-text e-mails instead of HTML formatted." + "fa_icon": "fas fa-users-cog" }, - "max_multiqc_email_size": { + "config_profile_name": { "type": "string", - "description": "File size limit when attaching MultiQC reports to summary emails.", - "default": "25.MB", - "fa_icon": "fas fa-file-upload", + "description": "Institutional config name.", "hidden": true, - "help_text": "If file generated by pipeline exceeds the threshold, it will not be attached." + "fa_icon": "fas fa-users-cog" }, - "monochrome_logs": { - "type": "boolean", - "description": "Do not use coloured log outputs.", - "fa_icon": "fas fa-palette", + "config_profile_description": { + "type": "string", + "description": "Institutional config description.", "hidden": true, - "help_text": "Set to disable colourful command line output and live life in monochrome." 
+ "fa_icon": "fas fa-users-cog" }, - "multiqc_config": { + "config_profile_contact": { "type": "string", - "description": "Custom config file to supply to MultiQC.", - "fa_icon": "fas fa-cog", - "hidden": true + "description": "Institutional config contact information.", + "hidden": true, + "fa_icon": "fas fa-users-cog" }, - "tracedir": { + "config_profile_url": { "type": "string", - "description": "Directory to keep pipeline Nextflow logs and reports.", - "default": "${params.outdir}/pipeline_info", - "fa_icon": "fas fa-cogs", - "hidden": true - }, - "show_hidden_params": { - "type": "boolean", - "fa_icon": "far fa-eye-slash", - "description": "Show all params when using `--help`", + "description": "Institutional config URL link.", "hidden": true, - "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." + "fa_icon": "fas fa-users-cog" } } }, @@ -172,7 +145,7 @@ "properties": { "max_cpus": { "type": "integer", - "description": "Maximum number of CPUs that can be requested for any single job.", + "description": "Maximum number of CPUs that can be requested for any single job.", "default": 16, "fa_icon": "fas fa-microchip", "hidden": true, @@ -198,58 +171,102 @@ } } }, - "institutional_config_options": { - "title": "Institutional config options", + "generic_options": { + "title": "Generic options", "type": "object", - "fa_icon": "fas fa-university", - "description": "Parameters used to describe centralised config profiles. These should not be edited.", - "help_text": "The centralised nf-core configuration profiles use a handful of pipeline parameters to describe themselves. This information is then printed to the Nextflow log when you run a pipeline. 
You should not need to change these values when you run a pipeline.", + "fa_icon": "fas fa-file-import", + "description": "Less common options for the pipeline, typically set in a config file.", + "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", "properties": { - "custom_config_version": { + "help": { + "type": "boolean", + "description": "Display help text.", + "fa_icon": "fas fa-question-circle", + "hidden": true + }, + "publish_dir_mode": { "type": "string", - "description": "Git commit id for Institutional configs.", - "default": "master", - "hidden": true, - "fa_icon": "fas fa-users-cog", - "help_text": "Provide git commit id for custom Institutional configs hosted at `nf-core/configs`. This was implemented for reproducibility purposes. Default: `master`.\n\n```bash\n## Download and use config file with following git commit id\n--custom_config_version d52db660777c4bf36546ddb188ec530c3ada1b96\n```" + "default": "copy", + "description": "Method used to save pipeline results to output directory.", + "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. 
See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", + "fa_icon": "fas fa-copy", + "enum": [ + "symlink", + "rellink", + "link", + "copy", + "copyNoFollow", + "move" + ], + "hidden": true }, - "custom_config_base": { + "email_on_fail": { "type": "string", - "description": "Base directory for Institutional configs.", - "default": "https://raw.githubusercontent.com/nf-core/configs/master", - "hidden": true, - "help_text": "If you're running offline, nextflow will not be able to fetch the institutional config files from the internet. If you don't need them, then this is not a problem. If you do need them, you should download the files from the repo and tell nextflow where to find them with the `custom_config_base` option. For example:\n\n```bash\n## Download and unzip the config files\ncd /path/to/my/configs\nwget https://github.com/nf-core/configs/archive/master.zip\nunzip master.zip\n\n## Run the pipeline\ncd /path/to/my/data\nnextflow run /path/to/pipeline/ --custom_config_base /path/to/my/configs/configs-master/\n```\n\n> Note that the nf-core/tools helper package has a `download` command to download all required pipeline files + singularity containers + institutional configs in one go for you, to make this process easier.", - "fa_icon": "fas fa-users-cog" + "description": "Email address for completion summary, only when pipeline fails.", + "fa_icon": "fas fa-exclamation-triangle", + "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$", + "help_text": "An email address to send a summary email to when the pipeline is completed - ONLY sent if the pipeline does not exit successfully.", + "hidden": true }, - "hostnames": { + "plaintext_email": { + "type": "boolean", + "description": "Send plain-text email instead of HTML.", + "fa_icon": "fas fa-remove-format", + "hidden": true + }, + "max_multiqc_email_size": { "type": "string", - "description": "Institutional configs hostname.", - "hidden": true, - 
"fa_icon": "fas fa-users-cog" + "description": "File size limit when attaching MultiQC reports to summary emails.", + "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$", + "default": "25.MB", + "fa_icon": "fas fa-file-upload", + "hidden": true }, - "config_profile_name": { + "monochrome_logs": { + "type": "boolean", + "description": "Do not use coloured log outputs.", + "fa_icon": "fas fa-palette", + "hidden": true + }, + "multiqc_config": { "type": "string", - "description": "Institutional config name.", - "hidden": true, - "fa_icon": "fas fa-users-cog" + "description": "Custom config file to supply to MultiQC.", + "fa_icon": "fas fa-cog", + "hidden": true }, - "config_profile_description": { + "tracedir": { "type": "string", - "description": "Institutional config description.", + "description": "Directory to keep pipeline Nextflow logs and reports.", + "default": "${params.outdir}/pipeline_info", + "fa_icon": "fas fa-cogs", + "hidden": true + }, + "validate_params": { + "type": "boolean", + "description": "Boolean whether to validate parameters against the schema at runtime", + "default": true, + "fa_icon": "fas fa-check-square", + "hidden": true + }, + "show_hidden_params": { + "type": "boolean", + "fa_icon": "far fa-eye-slash", + "description": "Show all params when using `--help`", "hidden": true, - "fa_icon": "fas fa-users-cog" + "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." }, - "config_profile_contact": { - "type": "string", - "description": "Institutional config contact information.", + "enable_conda": { + "type": "boolean", + "description": "Run this workflow with Conda. 
You can also use '-profile conda' instead of providing this parameter.", "hidden": true, - "fa_icon": "fas fa-users-cog" + "fa_icon": "fas fa-bacon" }, - "config_profile_url": { - "type": "string", - "description": "Institutional config URL link.", + "singularity_pull_docker_container": { + "type": "boolean", + "description": "Instead of directly downloading Singularity images for use with Singularity, force the workflow to pull and convert Docker containers instead.", "hidden": true, - "fa_icon": "fas fa-users-cog" + "fa_icon": "fas fa-toolbox", + "help_text": "This may be useful for example if you are unable to directly pull Singularity containers to run the pipeline due to http/https proxy issues." } } } @@ -262,13 +279,13 @@ "$ref": "#/definitions/reference_genome_options" }, { - "$ref": "#/definitions/generic_options" + "$ref": "#/definitions/institutional_config_options" }, { "$ref": "#/definitions/max_job_request_options" }, { - "$ref": "#/definitions/institutional_config_options" + "$ref": "#/definitions/generic_options" } ] } diff --git a/subworkflows/local/input_check.nf b/subworkflows/local/input_check.nf new file mode 100644 index 00000000..b664bc8c --- /dev/null +++ b/subworkflows/local/input_check.nf @@ -0,0 +1,42 @@ +// +// Check input samplesheet and get read channels +// + +params.options = [:] + +include { SAMPLESHEET_CHECK } from '../../modules/local/samplesheet_check' addParams( options: params.options ) + +workflow INPUT_CHECK { + take: + samplesheet // file: /path/to/samplesheet.csv + + main: + SAMPLESHEET_CHECK ( samplesheet ) + .splitCsv ( header:true, sep:',' ) + .map { create_fastq_channels(it) } + .set { reads } + + emit: + reads // channel: [ val(meta), [ reads ] ] +} + +// Function to get list of [ meta, [ fastq_1, fastq_2 ] ] +def create_fastq_channels(LinkedHashMap row) { + def meta = [:] + meta.id = row.sample + meta.single_end = row.single_end.toBoolean() + + def array = [] + if (!file(row.fastq_1).exists()) { + exit 1, "ERROR: 
Please check input samplesheet -> Read 1 FastQ file does not exist!\n${row.fastq_1}" + } + if (meta.single_end) { + array = [ meta, [ file(row.fastq_1) ] ] + } else { + if (!file(row.fastq_2).exists()) { + exit 1, "ERROR: Please check input samplesheet -> Read 2 FastQ file does not exist!\n${row.fastq_2}" + } + array = [ meta, [ file(row.fastq_1), file(row.fastq_2) ] ] + } + return array +} diff --git a/workflows/mhcquant.nf b/workflows/mhcquant.nf new file mode 100644 index 00000000..80c22c6a --- /dev/null +++ b/workflows/mhcquant.nf @@ -0,0 +1,141 @@ +/* +======================================================================================== + VALIDATE INPUTS +======================================================================================== +*/ + +def summary_params = NfcoreSchema.paramsSummaryMap(workflow, params) + +// Validate input parameters +WorkflowMhcquant.initialise(params, log) + +// TODO nf-core: Add all file path parameters for the pipeline to the list below +// Check input path parameters to see if they exist +def checkPathParamList = [ params.input, params.multiqc_config, params.fasta ] +for (param in checkPathParamList) { if (param) { file(param, checkIfExists: true) } } + +// Check mandatory parameters +if (params.input) { ch_input = file(params.input) } else { exit 1, 'Input samplesheet not specified!' } + +/* +======================================================================================== + CONFIG FILES +======================================================================================== +*/ + +ch_multiqc_config = file("$projectDir/assets/multiqc_config.yaml", checkIfExists: true) +ch_multiqc_custom_config = params.multiqc_config ? 
Channel.fromPath(params.multiqc_config) : Channel.empty() + +/* +======================================================================================== + IMPORT LOCAL MODULES/SUBWORKFLOWS +======================================================================================== +*/ + +// Don't overwrite global params.modules, create a copy instead and use that within the main script. +def modules = params.modules.clone() + +// +// MODULE: Local to the pipeline +// +include { GET_SOFTWARE_VERSIONS } from '../modules/local/get_software_versions' addParams( options: [publish_files : ['tsv':'']] ) + +// +// SUBWORKFLOW: Consisting of a mix of local and nf-core/modules +// +include { INPUT_CHECK } from '../subworkflows/local/input_check' addParams( options: [:] ) + +/* +======================================================================================== + IMPORT NF-CORE MODULES/SUBWORKFLOWS +======================================================================================== +*/ + +def multiqc_options = modules['multiqc'] +multiqc_options.args += params.multiqc_title ? 
Utils.joinModuleArgs(["--title \"$params.multiqc_title\""]) : '' + +// +// MODULE: Installed directly from nf-core/modules +// +include { FASTQC } from '../modules/nf-core/modules/fastqc/main' addParams( options: modules['fastqc'] ) +include { MULTIQC } from '../modules/nf-core/modules/multiqc/main' addParams( options: multiqc_options ) + +/* +======================================================================================== + RUN MAIN WORKFLOW +======================================================================================== +*/ + +// Info required for completion email and summary +def multiqc_report = [] + +workflow MHCQUANT { + + ch_software_versions = Channel.empty() + + // + // SUBWORKFLOW: Read in samplesheet, validate and stage input files + // + INPUT_CHECK ( + ch_input + ) + + // + // MODULE: Run FastQC + // + FASTQC ( + INPUT_CHECK.out.reads + ) + ch_software_versions = ch_software_versions.mix(FASTQC.out.version.first().ifEmpty(null)) + + // + // MODULE: Pipeline reporting + // + ch_software_versions + .map { it -> if (it) [ it.baseName, it ] } + .groupTuple() + .map { it[1][0] } + .flatten() + .collect() + .set { ch_software_versions } + + GET_SOFTWARE_VERSIONS ( + ch_software_versions.map { it }.collect() + ) + + // + // MODULE: MultiQC + // + workflow_summary = WorkflowMhcquant.paramsSummaryMultiqc(workflow, summary_params) + ch_workflow_summary = Channel.value(workflow_summary) + + ch_multiqc_files = Channel.empty() + ch_multiqc_files = ch_multiqc_files.mix(Channel.from(ch_multiqc_config)) + ch_multiqc_files = ch_multiqc_files.mix(ch_multiqc_custom_config.collect().ifEmpty([])) + ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) + ch_multiqc_files = ch_multiqc_files.mix(GET_SOFTWARE_VERSIONS.out.yaml.collect()) + ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}.ifEmpty([])) + + MULTIQC ( + ch_multiqc_files.collect() + ) + multiqc_report = 
MULTIQC.out.report.toList() + ch_software_versions = ch_software_versions.mix(MULTIQC.out.version.ifEmpty(null)) +} + +/* +======================================================================================== + COMPLETION EMAIL AND SUMMARY +======================================================================================== +*/ + +workflow.onComplete { + NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) + NfcoreTemplate.summary(workflow, params, log) +} + +/* +======================================================================================== + THE END +======================================================================================== +*/ From 468689608cea44910c24512f837b8f2ceb89784c Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 13 Jul 2021 15:30:45 +0000 Subject: [PATCH 002/227] Template update for nf-core/tools version 2.0.1 --- .github/workflows/linting.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 13b4fc81..fbde5cf8 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -127,7 +127,7 @@ jobs: GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }} - run: nf-core -l lint_log.txt lint ${GITHUB_WORKSPACE} --markdown lint_results.md + run: nf-core -l lint_log.txt lint --dir ${GITHUB_WORKSPACE} --markdown lint_results.md - name: Save PR number if: ${{ always() }} From a628a8f11934770a0b85ea7948cdefb1121ee274 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 27 Jul 2021 16:00:55 +0000 Subject: [PATCH 003/227] Template update for nf-core/tools version 2.1 --- .editorconfig | 3 +++ .github/CONTRIBUTING.md | 6 +++--- .github/PULL_REQUEST_TEMPLATE.md | 2 +- .github/workflows/linting.yml | 2 +- README.md | 2 +- bin/scrape_software_versions.py | 2 +- lib/NfcoreTemplate.groovy | 
22 +++++++++++++--------- nextflow_schema.json | 4 ++-- workflows/mhcquant.nf | 4 +++- 9 files changed, 28 insertions(+), 19 deletions(-) diff --git a/.editorconfig b/.editorconfig index afb20bb1..95549501 100644 --- a/.editorconfig +++ b/.editorconfig @@ -11,6 +11,9 @@ indent_style = space [*.{yml,yaml}] indent_size = 2 +[*.json] +insert_final_newline = unset + # These files are edited and tested upstream in nf-core/modules [/modules/nf-core/**] charset = unset diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index aaa81423..05fd627c 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -19,7 +19,7 @@ If you'd like to write some code for nf-core/mhcquant, the standard workflow is * If there isn't one already, please create one so that others know you're working on this 2. [Fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) the [nf-core/mhcquant repository](https://github.com/nf-core/mhcquant) to your GitHub account 3. Make the necessary changes / additions within your forked repository following [Pipeline conventions](#pipeline-contribution-conventions) -4. Use `nf-core schema build .` and add any new parameters to the pipeline JSON schema (requires [nf-core tools](https://github.com/nf-core/tools) >= 1.10). +4. Use `nf-core schema build` and add any new parameters to the pipeline JSON schema (requires [nf-core tools](https://github.com/nf-core/tools) >= 1.10). 5. Submit a Pull Request against the `dev` branch and wait for the code to be reviewed and merged If you're not used to this workflow with git, you can start with some [docs from GitHub](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests) or even their [excellent `git` resources](https://try.github.io/). @@ -69,7 +69,7 @@ If you wish to contribute a new step, please use the following coding standards: 2. Write the process block (see below). 3. Define the output channel if needed (see below). 4. 
Add any new flags/options to `nextflow.config` with a default (see below). -5. Add any new flags/options to `nextflow_schema.json` with help text (with `nf-core schema build .`). +5. Add any new flags/options to `nextflow_schema.json` with help text (with `nf-core schema build`). 6. Add any new flags/options to the help message (for integer/text parameters, print to help the corresponding `nextflow.config` parameter). 7. Add sanity checks for all relevant parameters. 8. Add any new software to the `scrape_software_versions.py` script in `bin/` and the version command to the `scrape_software_versions` process in `main.nf`. @@ -83,7 +83,7 @@ If you wish to contribute a new step, please use the following coding standards: Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope. -Once there, use `nf-core schema build .` to add to `nextflow_schema.json`. +Once there, use `nf-core schema build` to add to `nextflow_schema.json`. ### Default processes resource requirements diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 8c2143be..90bc342a 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -18,7 +18,7 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/nf-core/mhcq - [ ] If you've fixed a bug or added code that should be tested, add tests! - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/mhcquant/tree/master/.github/CONTRIBUTING.md) - [ ] If necessary, also make a PR on the nf-core/mhcquant _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. -- [ ] Make sure your code lints (`nf-core lint .`). +- [ ] Make sure your code lints (`nf-core lint`). - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker`). - [ ] Usage Documentation in `docs/usage.md` is updated. 
- [ ] Output Documentation in `docs/output.md` is updated. diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index fbde5cf8..3b448773 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -53,7 +53,7 @@ jobs: - uses: actions/setup-node@v1 with: - node-version: "10" + node-version: '10' - name: Install editorconfig-checker run: npm install -g editorconfig-checker diff --git a/README.md b/README.md index 205bfe0f..02e8d3ae 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ On release, automated continuous integration tests run the pipeline on a full-si ## Quick Start -1. Install [`Nextflow`](https://nf-co.re/usage/installation) (`>=21.04.0`) +1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.04.0`) 2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_ diff --git a/bin/scrape_software_versions.py b/bin/scrape_software_versions.py index 8a5bd9ed..44a9c393 100755 --- a/bin/scrape_software_versions.py +++ b/bin/scrape_software_versions.py @@ -30,7 +30,7 @@ print("
{}
{}
".format(k, v)) print(" ") -# Write out regexes as csv file: +# Write out as tsv file: with open("software_versions.tsv", "w") as f: for k, v in sorted(results.items()): f.write("{}\t{}\n".format(k, v)) diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy index b6e689ec..44551e0a 100755 --- a/lib/NfcoreTemplate.groovy +++ b/lib/NfcoreTemplate.groovy @@ -24,17 +24,21 @@ class NfcoreTemplate { public static void hostName(workflow, params, log) { Map colors = logColours(params.monochrome_logs) if (params.hostnames) { - def hostname = "hostname".execute().text.trim() - params.hostnames.each { prof, hnames -> - hnames.each { hname -> - if (hostname.contains(hname) && !workflow.profile.contains(prof)) { - log.info "=${colors.yellow}====================================================${colors.reset}=\n" + - "${colors.yellow}WARN: You are running with `-profile $workflow.profile`\n" + - " but your machine hostname is ${colors.white}'$hostname'${colors.reset}.\n" + - " ${colors.yellow_bold}Please use `-profile $prof${colors.reset}`\n" + - "=${colors.yellow}====================================================${colors.reset}=" + try { + def hostname = "hostname".execute().text.trim() + params.hostnames.each { prof, hnames -> + hnames.each { hname -> + if (hostname.contains(hname) && !workflow.profile.contains(prof)) { + log.info "=${colors.yellow}====================================================${colors.reset}=\n" + + "${colors.yellow}WARN: You are running with `-profile $workflow.profile`\n" + + " but your machine hostname is ${colors.white}'$hostname'${colors.reset}.\n" + + " ${colors.yellow_bold}Please use `-profile $prof${colors.reset}`\n" + + "=${colors.yellow}====================================================${colors.reset}=" + } } } + } catch (Exception e) { + log.warn "[$workflow.manifest.name] Could not determine 'hostname' - skipping check. Reason: ${e.message}." 
} } } diff --git a/nextflow_schema.json b/nextflow_schema.json index f997fee6..f729b335 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -18,7 +18,7 @@ "type": "string", "format": "file-path", "mimetype": "text/csv", - "pattern": "\\.csv$", + "pattern": "^\\S+\\.csv$", "schema": "assets/schema_input.json", "description": "Path to comma-separated file containing information about the samples in the experiment.", "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row. See [usage docs](https://nf-co.re/mhcquant/usage#samplesheet-input).", @@ -60,7 +60,7 @@ "type": "string", "format": "file-path", "mimetype": "text/plain", - "pattern": "\\.fn?a(sta)?(\\.gz)?$", + "pattern": "^\\S+\\.fn?a(sta)?(\\.gz)?$", "description": "Path to FASTA genome file.", "help_text": "This parameter is *mandatory* if `--genome` is not specified. If you don't have a BWA index available this will be generated for you automatically. 
Combine with `--save_reference` to save BWA index for future runs.", "fa_icon": "far fa-file-code" diff --git a/workflows/mhcquant.nf b/workflows/mhcquant.nf index 80c22c6a..ffaf7407 100644 --- a/workflows/mhcquant.nf +++ b/workflows/mhcquant.nf @@ -130,7 +130,9 @@ workflow MHCQUANT { */ workflow.onComplete { - NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) + if (params.email || params.email_on_fail) { + NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) + } NfcoreTemplate.summary(workflow, params, log) } From 35761c1f86d2bba056199ed291a18f39a00853e0 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 4 Oct 2021 17:18:39 +0200 Subject: [PATCH 004/227] Propose a solution for issue #176 --- workflows/mhcquant.nf | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/workflows/mhcquant.nf b/workflows/mhcquant.nf index 7d3c0b54..8745421b 100644 --- a/workflows/mhcquant.nf +++ b/workflows/mhcquant.nf @@ -283,7 +283,7 @@ workflow MHCQUANT { .groupTuple(by: [0]) } else { - ch_proceeding_idx = OPENMS_PEPTIDEINDEXER.out.idXML + ch_proceeding_idx = OPENMS_PEPTIDEINDEXER.out.idxml .map { meta, raw -> [[id:meta.sample + "_" + meta.condition, sample:meta.sample, condition:meta.condition, ext:meta.ext], raw] @@ -324,14 +324,14 @@ workflow MHCQUANT { filter_q_value = OPENMS_IDFILTER_Q_VALUE.out.idxml.map{ it -> [it[0].sample, it[0], it[1]] } } - OPENMS_IDFILTER_FOR_ALIGNMENT.out[0] - .join( OPENMS_MAPRTTRANSFORMERMZML.out[0], by: [0] ) - .map { it -> [it[0].sample, it[0], it[1], it[2]] } - .combine( filter_q_value , by: [0] ) - .map { it -> [it[1], it[2], it[3], it[5]] } - .set{ joined_mzmls_ids_quant } - if ( !params.skip_quantification) { + // Combining the necessary information into one channel + OPENMS_IDFILTER_FOR_ALIGNMENT.out[0] + .join( OPENMS_MAPRTTRANSFORMERMZML.out[0], by: [0] ) + .map { it -> [it[0].sample, it[0], it[1], it[2]] } + .combine( 
filter_q_value , by: [0] ) + .map { it -> [it[1], it[2], it[3], it[5]] } + .set{ joined_mzmls_ids_quant } // Quantify identifications using targeted feature extraction OPENMS_FEATUREFINDERIDENTIFICATION(joined_mzmls_ids_quant) // Link extracted features @@ -344,12 +344,17 @@ workflow MHCQUANT { .groupTuple(by:[0])) // Resolve conflicting ids matching to the same feature OPENMS_IDCONFLICTRESOLVER(OPENMS_FEATURELINKERUNLABELEDKD.out.consensusxml) + // Assign the outcome of the id conflict resolver as export content + export_content = OPENMS_IDCONFLICTRESOLVER.out.consensusxml + } else { + // Assign the outcome of the filter q value as export content + export_content = filter_q_value.map { it -> [it[1], it[2]] } } // Export all information as text to csv - OPENMS_TEXTEXPORTER(OPENMS_IDCONFLICTRESOLVER.out.consensusxml) + OPENMS_TEXTEXPORTER(export_content) // Export all information as mzTab - OPENMS_MZTABEXPORTER(OPENMS_IDCONFLICTRESOLVER.out.consensusxml) + OPENMS_MZTABEXPORTER(export_content) ////////////////////////////////////////////////////////////////////////////////////////////// // TODO: Replacement of custom scripts with epytope ch_predicted_possible_neoepitopes = Channel.empty() From a4850955661d96fd869fbcd5e08a458f439f6d10 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 11 Oct 2021 08:58:05 +0200 Subject: [PATCH 005/227] Update: (i) only proceed with the class prediction when the progress is done without using the --skip-quantification parameter, (ii) change output dir of featurelinkerunlabeledkd --- modules/local/openms_featurelinkerunlabeledkd.nf | 2 +- workflows/mhcquant.nf | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/local/openms_featurelinkerunlabeledkd.nf b/modules/local/openms_featurelinkerunlabeledkd.nf index c2c69e1d..4bcf00e1 100644 --- a/modules/local/openms_featurelinkerunlabeledkd.nf +++ b/modules/local/openms_featurelinkerunlabeledkd.nf @@ -10,7 +10,7 @@ process OPENMS_FEATURELINKERUNLABELEDKD { 
publishDir "${params.outdir}", mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'RT_prediction', publish_id:'RT_prediction') } + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'Intermediate_Results', publish_id:'Intermediate_Results') } conda (params.enable_conda ? "bioconda::openms-thirdparty=2.5.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { diff --git a/workflows/mhcquant.nf b/workflows/mhcquant.nf index 8745421b..62eef1c8 100644 --- a/workflows/mhcquant.nf +++ b/workflows/mhcquant.nf @@ -358,7 +358,7 @@ workflow MHCQUANT { ////////////////////////////////////////////////////////////////////////////////////////////// // TODO: Replacement of custom scripts with epytope ch_predicted_possible_neoepitopes = Channel.empty() - if ( params.predict_class_1 ) { + if ( params.predict_class_1 & !params.skip_quantification ) { // If specified predict peptides using MHCFlurry PREDICT_PEPTIDES_MHCFLURRY_CLASS_1( OPENMS_MZTABEXPORTER.out.mztab @@ -385,7 +385,7 @@ workflow MHCQUANT { } ch_predicted_possible_neoepitopes_II = Channel.empty() - if ( params.predict_class_2 ) { + if ( params.predict_class_2 & !params.skip_quantification ) { // Preprocess found peptides for MHCNuggets prediction class 2 PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2(OPENMS_MZTABEXPORTER.out.mztab) // Predict found peptides using MHCNuggets class 2 From 7c72d7d5dae0b08450731253cd5675b241fee12a Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Tue, 19 Oct 2021 16:11:16 +0200 Subject: [PATCH 006/227] small fixes and bump to 2.0.1 --- CHANGELOG.md | 14 ++++++++++++++ conf/base.config | 7 +++++++ .../local/openms_featurefinderidentification.nf | 2 +- nextflow.config | 4 ++-- 4 files changed, 24 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b4be8e18..f8bf02f1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 
+1,19 @@ # nf-core/mhcquant: Changelog +## v2.0.1 nf-core/mhcquant - 2021/10/19 + +### `Added` +* Includes the "process_intensive" in the conf/base.config +* Output is generated when the --skip_quantification is used (outcome q value filtering) +### `Fixed` + +* Changes output dir of featurelinkerunlabeledkd (RT_prediction > Intermediate_Results) +* [#165] - Raise memory requirements of FeatureFinderIdentification step +* [#176] - Pipeline crashes when setting the --skip_quantification flag +### `Dependencies` + +### `Deprecated` + ## v2.0.0 nf-core/mhcquant "Steel Beagle" - 2021/09/03 ### `Added` diff --git a/conf/base.config b/conf/base.config index 90315def..17dd5995 100644 --- a/conf/base.config +++ b/conf/base.config @@ -41,6 +41,13 @@ process { errorStrategy = 'retry' maxRetries = 10 } + + withLabel:process_intensive { + cpus = { check_max( 2 * task.attempt, 'cpus' ) } + memory = { check_max( 16.GB * task.attempt, 'memory' ) } + time = { check_max( 6.h * task.attempt, 'time' ) } + } + withName:get_software_versions { cache = false } diff --git a/modules/local/openms_featurefinderidentification.nf b/modules/local/openms_featurefinderidentification.nf index ffb0acdc..5449d287 100644 --- a/modules/local/openms_featurefinderidentification.nf +++ b/modules/local/openms_featurefinderidentification.nf @@ -6,7 +6,7 @@ options = initOptions(params.options) process OPENMS_FEATUREFINDERIDENTIFICATION { tag "$meta.id" - label 'process_low' + label 'process_intensive' publishDir "${params.outdir}", mode: params.publish_dir_mode, diff --git a/nextflow.config b/nextflow.config index 541c3117..ce73d890 100644 --- a/nextflow.config +++ b/nextflow.config @@ -203,7 +203,7 @@ manifest { description = 'Identify and quantify peptides from mass spectrometry raw data' mainScript = 'main.nf' nextflowVersion = '>=21.04.0' - version = '2.0.0' + version = '2.0.1dev' } // Function to ensure that resource requirements don't go beyond @@ -237,4 +237,4 @@ def check_max(obj, type) { return
obj } } -} +} \ No newline at end of file From 8724124eb7cefe187d4b94744198cd52328525a5 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 25 Oct 2021 17:56:08 +0200 Subject: [PATCH 007/227] bug fixing, adjusting the version outcome and updating versiion of openms(-thirdparty), mhcnuggets and mhcflurry --- .github/markdownlint.yml | 12 -- .github/workflows/awstest.yml | 32 +++--- .markdownlint.yml | 2 + CHANGELOG.md | 23 +++- README.md | 29 +++-- assets/schema_input.json | 46 ++++++++ bin/check_samplesheet.py | 5 +- conf/base.config | 69 ++++++------ conf/modules.config | 40 ++++--- conf/test.config | 15 +-- conf/test_full.config | 15 +-- lib/NfcoreTemplate.groovy | 22 ++-- lib/WorkflowMain.groovy | 10 +- lib/WorkflowMhcquant.groovy | 2 +- modules.json | 8 +- modules/local/functions.nf | 98 +++++++++------- modules/local/openms_cometadapter.nf | 16 ++- modules/local/openms_decoydatabase.nf | 24 ++-- modules/local/openms_falsediscoveryrate.nf | 16 ++- .../openms_featurefinderidentification.nf | 18 +-- .../local/openms_featurelinkerunlabeledkd.nf | 16 +-- modules/local/openms_idconflictresolver.nf | 16 ++- modules/local/openms_idfilter.nf | 16 ++- modules/local/openms_idmerger.nf | 16 ++- .../local/openms_mapaligneridentification.nf | 16 ++- modules/local/openms_maprttransformer.nf | 16 ++- modules/local/openms_mztabexporter.nf | 16 ++- modules/local/openms_peakpickerhires.nf | 16 ++- modules/local/openms_peptideindexer.nf | 16 ++- modules/local/openms_percolatoradapter.nf | 16 ++- modules/local/openms_psmfeatureextractor.nf | 16 ++- modules/local/openms_rtmodel.nf | 16 ++- modules/local/openms_rtpredict.nf | 16 ++- modules/local/openms_textexporter.nf | 20 ++-- modules/local/openms_thermorawfileparser.nf | 15 ++- ...tprocess_neoepitopes_mhcnuggets_class_2.nf | 12 +- ...postprocess_peptides_mhcnuggets_class_2.nf | 12 +- .../predict_neoepitopes_mhcflurry_class_1.nf | 16 ++- .../predict_neoepitopes_mhcnuggets_class_2.nf | 10 +- 
.../predict_peptides_mhcflurry_class_1.nf | 14 ++- .../predict_peptides_mhcnuggets_class_2.nf | 12 +- .../predict_possible_class_2_neoepitopes.nf | 14 ++- modules/local/predict_possible_neoepitopes.nf | 14 ++- modules/local/predict_psms.nf | 16 ++- ...eprocess_neoepitopes_mhcnuggets_class_2.nf | 14 ++- .../preprocess_peptides_mhcnuggets_class_2.nf | 12 +- .../resolve_found_class_2_neoepitopes.nf | 14 ++- modules/local/resolve_found_neoepitopes.nf | 14 ++- modules/local/samplesheet_check.nf | 49 ++++---- modules/local/subworkflow/input_check.nf | 23 ---- .../custom/dumpsoftwareversions/functions.nf | 78 +++++++++++++ .../custom/dumpsoftwareversions/main.nf | 106 ++++++++++++++++++ .../custom/dumpsoftwareversions/meta.yml | 33 ++++++ nextflow.config | 6 +- nextflow_schema.json | 7 +- subworkflow/local/input_check.nf | 40 +++++++ .../local}/refine_fdr_on_predicted_subset.nf | 21 ++-- workflows/mhcquant.nf | 106 +++++++++++------- 58 files changed, 927 insertions(+), 461 deletions(-) delete mode 100644 .github/markdownlint.yml create mode 100644 assets/schema_input.json delete mode 100644 modules/local/subworkflow/input_check.nf create mode 100644 modules/nf-core/modules/custom/dumpsoftwareversions/functions.nf create mode 100644 modules/nf-core/modules/custom/dumpsoftwareversions/main.nf create mode 100644 modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml create mode 100644 subworkflow/local/input_check.nf rename {modules/local/subworkflow => subworkflow/local}/refine_fdr_on_predicted_subset.nf (65%) diff --git a/.github/markdownlint.yml b/.github/markdownlint.yml deleted file mode 100644 index 8d7eb53b..00000000 --- a/.github/markdownlint.yml +++ /dev/null @@ -1,12 +0,0 @@ -# Markdownlint configuration file -default: true -line-length: false -no-duplicate-header: - siblings_only: true -no-inline-html: - allowed_elements: - - img - - p - - kbd - - details - - summary diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml index 
1247ea86..acf54139 100644 --- a/.github/workflows/awstest.yml +++ b/.github/workflows/awstest.yml @@ -1,6 +1,5 @@ name: nf-core AWS test -# This workflow is triggered on push to the master branch. -# It can be additionally triggered manually with GitHub actions workflow dispatch. +# This workflow can be triggered manually with the GitHub actions workflow dispatch button. # It runs the -profile 'test' on AWS batch. on: @@ -15,26 +14,23 @@ env: AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }} jobs: - run-awstest: + run-tower: name: Run AWS tests if: github.repository == 'nf-core/mhcquant' runs-on: ubuntu-latest steps: - name: Setup Miniconda uses: conda-incubator/setup-miniconda@v2 - with: - auto-update-conda: true - python-version: 3.7 - - name: Install awscli - run: conda install -c conda-forge awscli - - name: Start AWS batch job - # For example: adding multiple test runs with different parameters - # Remember that you can parallelise this by using strategy.matrix - run: | - aws batch submit-job \ - --region eu-west-1 \ - --job-name nf-core-mhcquant \ - --job-queue $AWS_JOB_QUEUE \ - --job-definition $AWS_JOB_DEFINITION \ - --container-overrides '{"command": ["nf-core/mhcquant", "-r '"${GITHUB_SHA}"' -profile test --outdir s3://'"${AWS_S3_BUCKET}"'/mhcquant/results-'"${GITHUB_SHA}"' -w s3://'"${AWS_S3_BUCKET}"'/mhcquant/work-'"${GITHUB_SHA}"' -with-tower"], "environment": [{"name": "TOWER_ACCESS_TOKEN", "value": "'"$TOWER_ACCESS_TOKEN"'"}]}' + with: + workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} + bearer_token: ${{ secrets.TOWER_BEARER_TOKEN }} + compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} + pipeline: ${{ github.repository }} + revision: ${{ github.sha }} + workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/mhcquant/work-${{ github.sha }} + parameters: | + { + "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/mhcquant/results-${{ github.sha }}" + } + profiles: '[ "test", "aws_tower" ]' diff --git a/.markdownlint.yml b/.markdownlint.yml index 9e605fcf..e7fc97a7 100644 --- 
a/.markdownlint.yml +++ b/.markdownlint.yml @@ -12,3 +12,5 @@ no-inline-html: - kbd - details - summary +single-title: + level: 2 diff --git a/CHANGELOG.md b/CHANGELOG.md index f8bf02f1..3ce21ccc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,17 +1,30 @@ # nf-core/mhcquant: Changelog -## v2.0.1 nf-core/mhcquant - 2021/10/19 +## v2.1.0 nf-core/mhcquant "Olive Tin Hamster" - 2021/10/25 ### `Added` -* Includes the "process_intensive" in the conf/base.config -* Output is generated when the --skip-quantification is used (outcome q value filtering) -### `Fixed` +* Inclusion of assets/schema_input.json -* Changes output dir of featurelinkerunlabeledkd (RT_prediction > Intermediate_Results) +### `Fixed` +* Fixed typos * [#165] - Raise memory requirements of FeatureFinderIdentification step * [#176] - Pipeline crashes when setting the --skip_quantification flag ### `Dependencies` +Note, since the pipeline is now using Nextflow DSL2, each process will be run with its own [Biocontainer](https://biocontainers.pro/#/registry). This means that on occasion it is entirely possible for the pipeline to be using different versions of the same tool. However, the overall software dependency changes compared to the last release have been listed below for reference. + + +| Dependency | Old version | New version | +|-----------------------|-------------|-------------| +| `openms` | 2.5.0 | 2.6.0 | +| `openms-thirdparty` | 2.5.0 | 2.6.0 | +| `thermorawfileparser` | 1.2.3 | 1.3.4 | +| `mhcflurry` | 1.4.3 | 2.0.1 | + +> **NB:** Dependency has been __updated__ if both old and new version information is present. +> **NB:** Dependency has been __added__ if just the new version information is present. +> **NB:** Dependency has been __removed__ if version information isn't present. 
+ ### `Deprecated` ## v2.0.0 nf-core/mhcquant "Steel Beagle" - 2021/09/03 diff --git a/README.md b/README.md index df4296f4..ee23acd1 100644 --- a/README.md +++ b/README.md @@ -4,15 +4,20 @@ [![GitHub Actions CI Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/mhcquant/actions) [![GitHub Actions Linting Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/mhcquant/actions) +[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.5407955-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.1400710) + [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A521.04.0-brightgreen.svg)](https://www.nextflow.io/) +[![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) +[![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) +[![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) -[![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/) -[![Docker](https://img.shields.io/docker/automated/nfcore/mhcquant.svg)](https://hub.docker.com/r/nfcore/mhcquant) [![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23mhcquant-4A154B?logo=slack)](https://nfcore.slack.com/channels/mhcquant) +[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core) +[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core) ## Introduction -nfcore/mhcquant is a bioinformatics analysis pipeline used for quantitative processing of data dependent (DDA) peptidomics data. 
+**nfcore/mhcquant** is a bioinformatics analysis pipeline used for quantitative processing of data dependent (DDA) peptidomics data. It was specifically designed to analyse immunopeptidomics data, which deals with the analysis of affinity purified, unspecifically cleaved peptides that have recently been discussed intensively in [the context of cancer vaccines](https://www.nature.com/articles/ncomms13404). @@ -25,17 +30,19 @@ The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool ## Quick Start -1. Install [`nextflow`](https://nf-co.re/usage/installation) (`>=21.04.0`) +1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.04.0`) -2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_ +2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_. Note: This pipeline does not currently support running with Conda on macOS if the `--remove_ribo_rna` parameter is used because the latest version of the SortMeRNA package is not available for this platform. 3. 
Download the pipeline and test it on a minimal dataset with a single command: - ```bash - nextflow run nf-core/mhcquant -profile test, + ```console + nextflow run nf-core/mhcquant -profile test, ``` - > Please check [nf-core/configs](https://github.com/nf-core/configs#documentation) to see if a custom config file to run nf-core pipelines already exists for your Institute. If so, you can simply use `-profile ` in your command. This will enable either `docker` or `singularity` and set the appropriate execution settings for your local compute environment. + > * Please check [nf-core/configs](https://github.com/nf-core/configs#documentation) to see if a custom config file to run nf-core pipelines already exists for your Institute. If so, you can simply use `-profile ` in your command. This will enable either `docker` or `singularity` and set the appropriate execution settings for your local compute environment. + > * If you are using `singularity` then the pipeline will auto-detect this and attempt to download the Singularity images directly as opposed to performing a conversion from Docker images. If you are persistently observing issues downloading Singularity images directly due to timeout or network issues then please use the `--singularity_pull_docker_container` parameter to pull and convert the Docker image instead. Alternatively, it is highly recommended to use the [`nf-core download`](https://nf-co.re/tools/#downloading-pipelines-for-offline-use) command to pre-download all of the required containers before running the pipeline and to set the [`NXF_SINGULARITY_CACHEDIR` or `singularity.cacheDir`](https://www.nextflow.io/docs/latest/singularity.html?#singularity-docker-hub) Nextflow options to be able to store and re-use the images from a central location for future pipeline runs.
+ > * If you are using `conda`, it is highly recommended to use the [`NXF_CONDA_CACHEDIR` or `conda.cacheDir`](https://www.nextflow.io/docs/latest/conda.html) settings to store the environments in a central location for future pipeline runs. 4. Start running your own analysis! @@ -43,8 +50,8 @@ The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool nextflow run nf-core/mhcquant -profile test, --input 'samples.tsv' --fasta 'SWISSPROT_2020.fasta' - --allele_sheet 'alleles.tsv' - --predict_class_1 + --allele_sheet 'alleles.tsv' + --predict_class_1 --refine_fdr_on_predicted_subset ``` @@ -84,7 +91,7 @@ For further information or help, don't hesitate to get in touch on the [Slack `# ## Citations -If you use `nf-core/mhcquant` for your analysis, please cite: +If you use `nf-core/mhcquant` for your analysis, please cite it using the following doi: [10.5281/zenodo.5407955](https://doi.org/10.5281/zenodo.5407955) and the corresponding manuscript: > **MHCquant: Automated and Reproducible Data Analysis for Immunopeptidomics** > diff --git a/assets/schema_input.json b/assets/schema_input.json new file mode 100644 index 00000000..134d1b43 --- /dev/null +++ b/assets/schema_input.json @@ -0,0 +1,46 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://raw.githubusercontent.com/nf-core/mhcquant/master/assets/schema_input.json", + "title": "nf-core/mhcquant pipeline - params.input schema", + "description": "Schema for the file provided with params.input", + "type": "array", + "items": { + "type": "object", + "properties": { + "ID": { + "type": "integer", + "errorMessage": "Provide an unique identifier for the replicate, must be a numeric value" + }, + "Sample": { + "type": "string", + "pattern": "^\\S+-?", + "errorMessage": "Sample name must be provided and cannot contain spaces" + }, + "Condition": { + "type": "string", + "pattern": "^\\S+\\.f(ast)?q\\.gz$", + "errorMessage": "Sample condition must be provided and cannot contain 
spaces" + }, + "ReplicateFileName": { + "type": "string", + "errorMessage": "MS file spaces and must have extension '.raw' or '.mzml'", + "anyOf": [ + { + "type": "string", + "pattern": "^\\S+-?\\.raw$" + }, + { + "type": "string", + "pattern": "^\\S+-?\\.mzml$" + } + ] + } + }, + "required": [ + "ID", + "Sample", + "Condition", + "ReplicateFileName" + ] + } +} diff --git a/bin/check_samplesheet.py b/bin/check_samplesheet.py index 0176acf3..0d764b5b 100755 --- a/bin/check_samplesheet.py +++ b/bin/check_samplesheet.py @@ -1,8 +1,5 @@ #!/usr/bin/env python -# This script is based on the example at: https://raw.githubusercontent.com/nf-core/test-datasets/atacseq/design.csv - - import os import sys import errno @@ -10,7 +7,7 @@ def parse_args(args=None): - Description = "Reformat nf-core/MHCquant samplesheet file and check its contents." + Description = "Reformat nf-core/mhcquant samplesheet file and check its contents." Epilog = "Example usage: python check_samplesheet.py " parser = argparse.ArgumentParser(description=Description, epilog=Epilog) diff --git a/conf/base.config b/conf/base.config index 17dd5995..6cafb77b 100644 --- a/conf/base.config +++ b/conf/base.config @@ -1,51 +1,52 @@ + /* - * ------------------------------------------------- - * nf-core/mhcquant Nextflow base config file - * ------------------------------------------------- - * A 'blank slate' config file, appropriate for general - * use on most high performance compute environments. - * Assumes that all software is installed and available - * on the PATH. Runs in `local` mode - all jobs will be - * run on the logged in environment. - */ +======================================================================================== + nf-core/mhcquant Nextflow base config file +======================================================================================== + A 'blank slate' config file, appropriate for general use on most high performance + compute environments. 
Assumes that all software is installed and available on + the PATH. Runs in `local` mode - all jobs will be run on the logged in environment. +---------------------------------------------------------------------------------------- +*/ process { - cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 4.GB * task.attempt, 'memory' ) } - time = { check_max( 2.h * task.attempt, 'time' ) } + cpus = { check_max( 1 * task.attempt, 'cpus' ) } + memory = { check_max( 6.GB * task.attempt, 'memory' ) } + time = { check_max( 4.h * task.attempt, 'time' ) } errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' } - maxRetries = 1 - maxErrors = '-1' + maxRetries = 1 + maxErrors = '-1' // Process-specific resource requirements - // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors withLabel:process_low { - cpus = { check_max( 2 * task.attempt, 'cpus' ) } - memory = { check_max( 14.GB * task.attempt, 'memory' ) } - time = { check_max( 6.h * task.attempt, 'time' ) } + cpus = { check_max( 2 * task.attempt, 'cpus' ) } + memory = { check_max( 12.GB * task.attempt, 'memory' ) } + time = { check_max( 4.h * task.attempt, 'time' ) } } withLabel:process_medium { - cpus = { check_max( 6 * task.attempt, 'cpus' ) } - memory = { check_max( 8.GB * task.attempt, 'memory' ) } - time = { check_max( 2.h * task.attempt, 'time' ) } + cpus = { check_max( 6 * task.attempt, 'cpus' ) } + memory = { check_max( 36.GB * task.attempt, 'memory' ) } + time = { check_max( 8.h * task.attempt, 'time' ) } } - withLabel:process_medium_long { - cpus = { check_max( 6 * task.attempt, 'cpus' ) } - memory = { check_max( 8.GB * task.attempt, 'memory' ) } - time = { check_max( 8.h * task.attempt, 'time' ) } + withLabel:process_high { + cpus = { check_max( 12 * task.attempt, 'cpus' ) } + memory = { check_max( 72.GB * task.attempt, 'memory' ) } + time = { check_max( 16.h * task.attempt, 'time' ) } } - withLabel:process_web { - time = { check_max( 6.h * 
task.attempt, 'time' ) } - errorStrategy = 'retry' - maxRetries = 10 + withLabel:process_long { + time = { check_max( 20.h * task.attempt, 'time' ) } } - - withLabel:process_intensive { - cpus = { check_max( 2 * task.attempt, 'cpus' ) } - memory = { check_max( 16.GB * task.attempt, 'memory' ) } - time = { check_max( 6.h * task.attempt, 'time' ) } + withLabel:process_high_memory { + memory = { check_max( 200.GB * task.attempt, 'memory' ) } + } + withLabel:error_ignore { + errorStrategy = 'ignore' + } + withLabel:error_retry { + errorStrategy = 'retry' + maxRetries = 2 } withName:get_software_versions { diff --git a/conf/modules.config b/conf/modules.config index 954bcb81..43d40c6f 100644 --- a/conf/modules.config +++ b/conf/modules.config @@ -1,20 +1,24 @@ /* - * -------------------------------------------------- - * Config file for defining DSL2 per module options - * -------------------------------------------------- - * - * Available keys to override module options: - * args = Additional arguments appended to command in module. - * args2 = Second set of arguments appended to command in module (multi-tool modules). - * publish_dir = Directory to publish results. - * publish_by_id = Publish results in separate folders by meta.id value. - * publish_files = Groovy map where key = "file_ext" and value = "directory" to publish results for that file extension - * The value of "directory" is appended to the standard "publish_dir" path as defined above. - * If publish_files == null (unspecified) - All files are published. - * If publish_files == false - No files are published. - * suffix = File name suffix for output files. - * - */ +======================================================================================== + Config file for defining DSL2 per module options +======================================================================================== + Available keys to override module options: + args = Additional arguments appended to command in module. 
+ args2 = Second set of arguments appended to command in module (multi-tool modules). + args3 = Third set of arguments appended to command in module (multi-tool modules). + publish_dir = Directory to publish results. + publish_by_meta = Groovy list of keys available in meta map to append as directories to "publish_dir" path + If publish_by_meta = true - Value of ${meta['id']} is appended as a directory to "publish_dir" path + If publish_by_meta = ['id', 'custompath'] - If "id" is in meta map and "custompath" isn't then "${meta['id']}/custompath/" + is appended as a directory to "publish_dir" path + If publish_by_meta = false / null - No directories are appended to "publish_dir" path + publish_files = Groovy map where key = "file_ext" and value = "directory" to publish results for that file extension + The value of "directory" is appended to the standard "publish_dir" path as defined above. + If publish_files = null (unspecified) - All files are published. + If publish_files = false - No files are published. + suffix = File name suffix for output files. 
+---------------------------------------------------------------------------------------- +*/ params { modules { @@ -23,7 +27,7 @@ params { } 'openms_comet_adapter' { - args = "-precursor_mass_tolerance ${params.precursor_mass_tolerance} -fragment_bin_tolerance ${params.fragment_mass_tolerance} -fragment_bin_offset ${params.fragment_bin_offset} -num_hits ${params.num_hits} -digest_mass_range ${params.digest_mass_range} -max_variable_mods_in_peptide ${params.number_mods} -allowed_missed_cleavages 0 -precursor_charge ${params.prec_charge} -activation_method ${params.activation_method} -variable_modifications ${params.variable_mods.tokenize(',').collect { "'${it}'" }.join(" ") } -fixed_modifications ${params.fixed_mods.tokenize(',').collect { "'${it}'"}.join(" ")} -enzyme '${params.enzyme}' -spectrum_batch_size ${params.spectrum_batch_size} " + args = "-precursor_mass_tolerance ${params.precursor_mass_tolerance} -fragment_mass_tolerance ${params.fragment_mass_tolerance} -fragment_bin_offset ${params.fragment_bin_offset} -num_hits ${params.num_hits} -digest_mass_range ${params.digest_mass_range} -max_variable_mods_in_peptide ${params.number_mods} -missed_cleavages 0 -precursor_charge ${params.prec_charge} -activation_method ${params.activation_method} -variable_modifications ${params.variable_mods.tokenize(',').collect { "'${it}'" }.join(" ") } -fixed_modifications ${params.fixed_mods.tokenize(',').collect { "'${it}'"}.join(" ")} -enzyme '${params.enzyme}' -spectrum_batch_size ${params.spectrum_batch_size} " } 'generate_proteins_from_vcf' { @@ -31,7 +35,7 @@ params { } 'percolator_adapter' { - args = "-seed 4711 -trainFDR 0.05 -testFDR 0.05 -enzyme no_enzyme -subset-max-train ${params.subset_max_train} -doc ${params.description_correct_features} " + args = "-seed 4711 -trainFDR 0.05 -testFDR 0.05 -enzyme no_enzyme -subset_max_train ${params.subset_max_train} -doc ${params.description_correct_features} " } 'id_filter' { diff --git a/conf/test.config b/conf/test.config 
index 5249a374..65ba87e3 100644 --- a/conf/test.config +++ b/conf/test.config @@ -1,11 +1,12 @@ /* - * ------------------------------------------------- - * Nextflow config file for running tests - * ------------------------------------------------- - * Defines bundled input files and everything required - * to run a fast and simple test. Use as follows: - * nextflow run nf-core/mhcquant -profile test, - */ +======================================================================================== + Nextflow config file for running minimal tests +======================================================================================== + Defines input files and everything required to run a fast and simple pipeline test. + Use as follows: + nextflow run nf-core/mhcquant -profile test, +---------------------------------------------------------------------------------------- +*/ params { config_profile_name = 'Test profile' diff --git a/conf/test_full.config b/conf/test_full.config index fb96dfb8..3f1c521b 100644 --- a/conf/test_full.config +++ b/conf/test_full.config @@ -1,11 +1,12 @@ /* - * ------------------------------------------------- - * Nextflow config file for running full-size tests - * ------------------------------------------------- - * Defines bundled input files and everything required - * to run a full size pipeline test. Use as follows: - * nextflow run nf-core/mhcquant -profile test_full, - */ +======================================================================================== + Nextflow config file for running full-size tests +======================================================================================== + Defines input files and everything required to run a full size pipeline test. 
+ Use as follows: + nextflow run nf-core/mhcquant -profile test_full, +---------------------------------------------------------------------------------------- +*/ params { config_profile_name = 'Full test profile' diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy index 44551e0a..cd64d8b7 100755 --- a/lib/NfcoreTemplate.groovy +++ b/lib/NfcoreTemplate.groovy @@ -72,17 +72,17 @@ class NfcoreTemplate { misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp def email_fields = [:] - email_fields['version'] = workflow.manifest.version - email_fields['runName'] = workflow.runName - email_fields['success'] = workflow.success - email_fields['dateComplete'] = workflow.complete - email_fields['duration'] = workflow.duration - email_fields['exitStatus'] = workflow.exitStatus - email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - email_fields['errorReport'] = (workflow.errorReport ?: 'None') - email_fields['commandLine'] = workflow.commandLine - email_fields['projectDir'] = workflow.projectDir - email_fields['summary'] = summary << misc_fields + email_fields['version'] = workflow.manifest.version + email_fields['runName'] = workflow.runName + email_fields['success'] = workflow.success + email_fields['dateComplete'] = workflow.complete + email_fields['duration'] = workflow.duration + email_fields['exitStatus'] = workflow.exitStatus + email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + email_fields['errorReport'] = (workflow.errorReport ?: 'None') + email_fields['commandLine'] = workflow.commandLine + email_fields['projectDir'] = workflow.projectDir + email_fields['summary'] = summary << misc_fields // On success try attach the multiqc report def mqc_report = null diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 25ed0c6d..0b267f6f 100644 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -10,18 +10,18 @@ class WorkflowMain { public static String citation(workflow) { return "If you use 
${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + - " https://doi.org/10.5281/zenodo.1400710\n\n" + + " https://doi.org/10.5281/zenodo.5407955\n\n" + "* The nf-core framework\n" + - " https://dx.doi.org/10.1038/s41587-020-0439-x\n" + - " https://rdcu.be/b1GjZ\n\n" + + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + "* Software dependencies\n" + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" } + // // Print help to screen if required // public static String help(workflow, params, log) { - def command = "nextflow run ${workflow.manifest.name} --input 'samples.tsv' --fasta 'SWISSPROT_2020.fasta' --allele_sheet 'alleles.tsv' --predict_class_1 --refine_fdr_on_predicted_subset" + def command = "nextflow run ${workflow.manifest.name} --input 'samples.tsv' --fasta 'SWISSPROT_2020.fasta' --allele_sheet 'alleles.tsv' --vcf_sheet 'variants.tsv' --include_proteins_from_vcf --predict_class_1 -profile docker" def help_string = '' help_string += NfcoreTemplate.logo(workflow, params.monochrome_logs) help_string += NfcoreSchema.paramsHelp(workflow, params, command) @@ -73,7 +73,7 @@ class WorkflowMain { // Check input has been provided if (!params.input) { - log.error "Please provide an input samplesheet to the pipeline e.g.
'--input samplesheet.tsv'" System.exit(1) } } diff --git a/lib/WorkflowMhcquant.groovy b/lib/WorkflowMhcquant.groovy index 0febad33..f3b4e82f 100644 --- a/lib/WorkflowMhcquant.groovy +++ b/lib/WorkflowMhcquant.groovy @@ -1,5 +1,5 @@ // -// This file holds several functions specific to the workflow/rnaseq.nf in the nf-core/rnaseq pipeline +// This file holds several functions specific to the workflow/mhcquant.nf in the nf-core/mhcquant pipeline // class WorkflowMhcquant { diff --git a/modules.json b/modules.json index e5f4a905..e5d130db 100644 --- a/modules.json +++ b/modules.json @@ -1,5 +1,11 @@ { "name": "nf-core/mhcquant", "homePage": "https://github.com/nf-core/mhcquant", - "repos": {} + "repos": { + "nf-core/modules": { + "custom/dumpsoftwareversions": { + "git_sha": "84f2302920078b0cf7716b2a2e5fcc0be5c4531d" + } + } + } } \ No newline at end of file diff --git a/modules/local/functions.nf b/modules/local/functions.nf index 7e182723..2405335a 100644 --- a/modules/local/functions.nf +++ b/modules/local/functions.nf @@ -1,63 +1,81 @@ -/* - * ----------------------------------------------------- - * Utility functions used in nf-core DSL2 module files - * ----------------------------------------------------- - */ +// +// Utility functions used in nf-core DSL2 module files +// -/* - * Extract name of software tool from process name using $task.process - */ +// +// Extract name of software tool from process name using $task.process +// def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } -/* - * Function to initialise default values and to generate a Groovy Map of available options for nf-core modules - */ +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// def initOptions(Map args) { def Map 
options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.publish_by_id = args.publish_by_id ?: false - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' return options } -/* - * Tidy up and join elements of a list to return a path string - */ +// +// Tidy up and join elements of a list to return a path string +// def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes return paths.join('/') } -/* - * Function to save/publish module results - */ +// +// Function to save/publish module results +// def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_id) { - path_list.add(args.publish_id) - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && 
!System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" } } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } } - /* * Function to check the file extension */ diff --git a/modules/local/openms_cometadapter.nf b/modules/local/openms_cometadapter.nf index 860e55b7..9ed1d1af 100644 --- a/modules/local/openms_cometadapter.nf +++ b/modules/local/openms_cometadapter.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -8,11 +8,11 @@ process OPENMS_COMETADAPTER { tag "$meta.id" label 'process_high' - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms-thirdparty=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms-thirdparty:2.5.0--6" + container "https://depot.galaxyproject.org/singularity/openms-thirdparty:2.6.0--0" } else { - container "quay.io/biocontainers/openms-thirdparty:2.5.0--6" + container "quay.io/biocontainers/openms-thirdparty:2.6.0--0" } input: @@ -20,7 +20,7 @@ process OPENMS_COMETADAPTER { output: tuple val(meta), path("*.idXML"), emit: idxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -31,6 +31,10 @@ process OPENMS_COMETADAPTER { -out ${prefix}.idXML \\ -database ${fasta} \\ -threads $task.cpus $options.args - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}-thirdparty.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_decoydatabase.nf b/modules/local/openms_decoydatabase.nf index 8161c32f..8aee9931 100644 --- a/modules/local/openms_decoydatabase.nf +++ b/modules/local/openms_decoydatabase.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -8,11 +8,11 @@ process OPENMS_DECOYDATABASE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -20,17 +20,21 @@ process OPENMS_DECOYDATABASE { output: tuple val(meta), path("*_decoy.fasta"), emit: decoy - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? "${fasta.baseName}_${options.suffix}" : "${fasta.baseName}_decoy" """ - DecoyDatabase -in ${fasta} \\ - -out ${prefix}.fasta \\ - -decoy_string DECOY_ \\ - -decoy_string_position prefix - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + DecoyDatabase -in ${fasta} \\ + -out ${prefix}.fasta \\ + -decoy_string DECOY_ \\ + -decoy_string_position prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_falsediscoveryrate.nf b/modules/local/openms_falsediscoveryrate.nf index 93363e45..8925c634 100644 --- a/modules/local/openms_falsediscoveryrate.nf +++ b/modules/local/openms_falsediscoveryrate.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -8,11 +8,11 @@ process OPENMS_FALSEDISCOVERYRATE { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -20,7 +20,7 @@ process OPENMS_FALSEDISCOVERYRATE { output: tuple val(meta), path("*.idXML"), emit: idxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -31,6 +31,10 @@ process OPENMS_FALSEDISCOVERYRATE { -protein 'false' \\ -out ${prefix}.idXML \\ -threads ${task.cpus} - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_featurefinderidentification.nf b/modules/local/openms_featurefinderidentification.nf index 5449d287..f4724f19 100644 --- a/modules/local/openms_featurefinderidentification.nf +++ b/modules/local/openms_featurefinderidentification.nf @@ -1,22 +1,22 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) process OPENMS_FEATUREFINDERIDENTIFICATION { tag "$meta.id" - label 'process_intensive' + label 'process_medium' publishDir "${params.outdir}", mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'Intermediate_Results', publish_id:'Intermediate_Results') } - conda (params.enable_conda ? 
"bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -24,7 +24,7 @@ process OPENMS_FEATUREFINDERIDENTIFICATION { output: tuple val(meta), path("*.featureXML"), emit: featurexml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -41,6 +41,10 @@ process OPENMS_FEATUREFINDERIDENTIFICATION { -out ${prefix}.featureXML \\ -threads ${task.cpus} \\ $arguments - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_featurelinkerunlabeledkd.nf b/modules/local/openms_featurelinkerunlabeledkd.nf index 4bcf00e1..cf1122b5 100644 --- a/modules/local/openms_featurelinkerunlabeledkd.nf +++ b/modules/local/openms_featurelinkerunlabeledkd.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -12,11 +12,11 @@ process OPENMS_FEATURELINKERUNLABELEDKD { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'Intermediate_Results', publish_id:'Intermediate_Results') } - conda (params.enable_conda ? 
"bioconda::openms-thirdparty=2.5.0" : null) + conda (params.enable_conda ? "bioconda::openms-thirdparty=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms-thirdparty:2.5.0--6" + container "https://depot.galaxyproject.org/singularity/openms-thirdparty:2.6.0--0" } else { - container "quay.io/biocontainers/openms-thirdparty:2.5.0--6" + container "quay.io/biocontainers/openms-thirdparty:2.6.0--0" } input: @@ -24,7 +24,7 @@ process OPENMS_FEATURELINKERUNLABELEDKD { output: tuple val(meta), path("*.consensusXML"), emit: consensusxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -35,7 +35,9 @@ process OPENMS_FEATURELINKERUNLABELEDKD { -out '${prefix}.consensusXML' \\ -threads ${task.cpus} - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}-thirdparty.version.txt - + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_idconflictresolver.nf b/modules/local/openms_idconflictresolver.nf index 5d05d0c0..34f5ace0 100644 --- a/modules/local/openms_idconflictresolver.nf +++ b/modules/local/openms_idconflictresolver.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -8,11 +8,11 @@ process OPENMS_IDCONFLICTRESOLVER { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -20,7 +20,7 @@ process OPENMS_IDCONFLICTRESOLVER { output: tuple val(meta), path("*.consensusXML"), emit: consensusxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -30,6 +30,10 @@ process OPENMS_IDCONFLICTRESOLVER { IDConflictResolver -in ${consensus} \\ -out ${prefix}.consensusXML \\ -threads ${task.cpus} - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_idfilter.nf b/modules/local/openms_idfilter.nf index 70ac8a8d..9e3f474c 100644 --- a/modules/local/openms_idfilter.nf +++ b/modules/local/openms_idfilter.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -12,11 +12,11 @@ process OPENMS_IDFILTER { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'Intermediate_Results', publish_id:'Intermediate_Results') } - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -24,7 +24,7 @@ process OPENMS_IDFILTER { output: tuple val(meta), path("*.idXML"), emit: idxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -40,6 +40,10 @@ process OPENMS_IDFILTER { -out ${prefix}.idXML \\ -threads ${task.cpus} \\ $options.args ${whitelist} - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_idmerger.nf b/modules/local/openms_idmerger.nf index c61fa631..6877f9d8 100644 --- a/modules/local/openms_idmerger.nf +++ b/modules/local/openms_idmerger.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -8,11 +8,11 @@ process OPENMS_IDMERGER { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -20,7 +20,7 @@ process OPENMS_IDMERGER { output: tuple val(meta), path("*.idXML"), emit: idxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -32,6 +32,10 @@ process OPENMS_IDMERGER { -threads ${task.cpus} \\ -annotate_file_origin \\ -merge_proteins_add_PSMs - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_mapaligneridentification.nf b/modules/local/openms_mapaligneridentification.nf index 24714583..0e429768 100644 --- a/modules/local/openms_mapaligneridentification.nf +++ b/modules/local/openms_mapaligneridentification.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -8,11 +8,11 @@ process OPENMS_MAPALIGNERIDENTIFICATION { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -20,7 +20,7 @@ process OPENMS_MAPALIGNERIDENTIFICATION { output: tuple val(meta), path("*.trafoXML"), emit: trafoxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -30,6 +30,10 @@ process OPENMS_MAPALIGNERIDENTIFICATION { MapAlignerIdentification -in ${idxml} \\ -trafo_out ${out_names} \\ $options.args - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_maprttransformer.nf b/modules/local/openms_maprttransformer.nf index e04b964f..bf9786f2 100644 --- a/modules/local/openms_maprttransformer.nf +++ b/modules/local/openms_maprttransformer.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -8,11 +8,11 @@ process OPENMS_MAPRTTRANSFORMER { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -20,7 +20,7 @@ process OPENMS_MAPRTTRANSFORMER { output: tuple val(meta), path("*_aligned.*"), emit: aligned - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -31,6 +31,10 @@ process OPENMS_MAPRTTRANSFORMER { -trafo_in ${trafoxml} \\ -out ${meta.id}_aligned.${fileExt} \\ -threads $task.cpus - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_mztabexporter.nf b/modules/local/openms_mztabexporter.nf index 28884aa7..3162f862 100644 --- a/modules/local/openms_mztabexporter.nf +++ b/modules/local/openms_mztabexporter.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -12,11 +12,11 @@ process OPENMS_MZTABEXPORTER { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'Intermediate_Results', publish_id:'Intermediate_Results') } - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -24,7 +24,7 @@ process OPENMS_MZTABEXPORTER { output: tuple val(meta), path("*.mzTab"), emit: mztab - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -34,6 +34,10 @@ process OPENMS_MZTABEXPORTER { MzTabExporter -in ${mztab} \\ -out ${prefix}.mzTab \\ -threads ${task.cpus} - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_peakpickerhires.nf b/modules/local/openms_peakpickerhires.nf index 57b33d7e..b9bcb772 100644 --- a/modules/local/openms_peakpickerhires.nf +++ b/modules/local/openms_peakpickerhires.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -8,11 +8,11 @@ process OPENMS_PEAKPICKERHIRES { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -20,7 +20,7 @@ process OPENMS_PEAKPICKERHIRES { output: tuple val(meta), path("*.mzML"), emit: mzml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -30,6 +30,10 @@ process OPENMS_PEAKPICKERHIRES { PeakPickerHiRes -in ${mzml} \\ -out ${prefix}.mzML \\ -algorithm:ms_levels ${params.pick_ms_levels} - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_peptideindexer.nf b/modules/local/openms_peptideindexer.nf index fb2ac0f5..1fee3360 100644 --- a/modules/local/openms_peptideindexer.nf +++ b/modules/local/openms_peptideindexer.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -8,11 +8,11 @@ process OPENMS_PEPTIDEINDEXER { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -20,7 +20,7 @@ process OPENMS_PEPTIDEINDEXER { output: tuple val(meta), path("*.idXML"), emit: idxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -33,6 +33,10 @@ process OPENMS_PEPTIDEINDEXER { -fasta ${fasta} \\ -decoy_string DECOY \\ -enzyme:specificity none - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_percolatoradapter.nf b/modules/local/openms_percolatoradapter.nf index 153d4bc0..c0b137bb 100644 --- a/modules/local/openms_percolatoradapter.nf +++ b/modules/local/openms_percolatoradapter.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -12,11 +12,11 @@ process OPENMS_PERCOLATORADAPTER { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'Intermediate_Results', publish_id:'Intermediate_Results') } - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms-thirdparty=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms-thirdparty:2.5.0--6" + container "https://depot.galaxyproject.org/singularity/openms-thirdparty:2.6.0--0" } else { - container "quay.io/biocontainers/openms-thirdparty:2.5.0--6" + container "quay.io/biocontainers/openms-thirdparty:2.6.0--0" } input: @@ -24,7 +24,7 @@ process OPENMS_PERCOLATORADAPTER { output: tuple val(meta), path("*.idXML"), emit: idxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -35,6 +35,10 @@ process OPENMS_PERCOLATORADAPTER { PercolatorAdapter -in ${psm} \\ -out ${prefix}.idXML \\ $options.args - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}-thirdparty.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_psmfeatureextractor.nf b/modules/local/openms_psmfeatureextractor.nf index 9a93672b..af04bd4a 100644 --- a/modules/local/openms_psmfeatureextractor.nf +++ b/modules/local/openms_psmfeatureextractor.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -12,11 +12,11 @@ process OPENMS_PSMFEATUREEXTRACTOR { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'Intermediate_Results', publish_id:'Intermediate_Results') } - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -24,7 +24,7 @@ process OPENMS_PSMFEATUREEXTRACTOR { output: tuple val(meta), path("*.idXML"), emit: idxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -34,6 +34,10 @@ process OPENMS_PSMFEATUREEXTRACTOR { PSMFeatureExtractor -in ${merged} \\ -out ${prefix}.idXML \\ -threads ${task.cpus} - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_rtmodel.nf b/modules/local/openms_rtmodel.nf index 429a9a95..8626cf1f 100644 --- a/modules/local/openms_rtmodel.nf +++ b/modules/local/openms_rtmodel.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -8,11 +8,11 @@ process OPENMS_RTMODEL { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: @@ -20,7 +20,7 @@ process OPENMS_RTMODEL { output: tuple val(meta), path("*_rt_training.txt"), path("*.paramXML"), path("*_trainset.txt"), emit: complete - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -32,6 +32,10 @@ process OPENMS_RTMODEL { -out ${prefix}_rt_training.txt \\ -out_oligo_params ${prefix}_params.paramXML \\ -out_oligo_trainset ${prefix}_trainset.txt - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_rtpredict.nf b/modules/local/openms_rtpredict.nf index cca312bc..40ea2ae0 100644 --- a/modules/local/openms_rtpredict.nf +++ b/modules/local/openms_rtpredict.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -12,11 +12,11 @@ process OPENMS_RTPREDICT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'RT_prediction', publish_id:'RT_prediction') } - conda (params.enable_conda ? "bioconda::openms-thirdparty=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms-thirdparty=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms-thirdparty:2.5.0--6" + container "https://depot.galaxyproject.org/singularity/openms-thirdparty:2.6.0--0" } else { - container "quay.io/biocontainers/openms-thirdparty:2.5.0--6" + container "quay.io/biocontainers/openms-thirdparty:2.6.0--0" } input: @@ -24,7 +24,7 @@ process OPENMS_RTPREDICT { output: tuple val(meta), path("*.csv"), emit: csv - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -36,6 +36,10 @@ process OPENMS_RTPREDICT { -in_oligo_params ${rt_params} \\ -in_oligo_trainset ${trainset} \\ -out_text:file ${prefix}.csv - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}-thirdparty.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_textexporter.nf b/modules/local/openms_textexporter.nf index 72c955a8..bbd04f14 100644 --- a/modules/local/openms_textexporter.nf +++ b/modules/local/openms_textexporter.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -12,19 +12,19 @@ process OPENMS_TEXTEXPORTER { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'.', publish_id:'') } - conda (params.enable_conda ? "bioconda::openms=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::openms=2.6.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/openms:2.5.0--h4afb90d_6" + container "https://depot.galaxyproject.org/singularity/openms:2.6.0--h4afb90d_0" } else { - container "quay.io/biocontainers/openms:2.5.0--h4afb90d_6" + container "quay.io/biocontainers/openms:2.6.0--h4afb90d_0" } input: tuple val(meta), path(consensus_resolved) output: - tuple val(meta), path("*.csv"), emit: csv - path "*.version.txt", emit: version + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -32,11 +32,15 @@ process OPENMS_TEXTEXPORTER { """ TextExporter -in ${consensus_resolved} \\ - -out ${prefix}.csv \\ + -out ${prefix}.tsv \\ -threads ${task.cpus} \\ -id:add_hit_metavalues 0 \\ -id:add_metavalues 0 \\ -id:peptides_only - echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/ .*\$//' &> ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } diff --git a/modules/local/openms_thermorawfileparser.nf b/modules/local/openms_thermorawfileparser.nf index 00481838..9ca958f1 100644 --- a/modules/local/openms_thermorawfileparser.nf +++ b/modules/local/openms_thermorawfileparser.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -10,9 +10,9 @@ process OPENMS_THERMORAWFILEPARSER { conda (params.enable_conda ? 
"bioconda::thermorawfileparser::1.2.3" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/thermorawfileparser:1.2.3--1" + container "https://depot.galaxyproject.org/singularity/thermorawfileparser:1.3.4--ha8f3691_0" } else { - container "quay.io/biocontainers/thermorawfileparser:1.2.3--1" + container "quay.io/biocontainers/thermorawfileparser:1.3.4--ha8f3691_0" } input: @@ -20,7 +20,7 @@ process OPENMS_THERMORAWFILEPARSER { output: tuple val(meta), path("*.mzML"), emit: mzml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def software = getSoftwareName(task.process) @@ -30,6 +30,11 @@ process OPENMS_THERMORAWFILEPARSER { ThermoRawFileParser.sh -i=${rawfile} \\ -f=2 \\ -b=${prefix}.mzML - ThermoRawFileParser.sh --version &> ThermoRawFileParser.version.txt + > ThermoRawFileParser.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + thermorawfileparser: \$(ThermoRawFileParser.sh --version) + END_VERSIONS """ } diff --git a/modules/local/postprocess_neoepitopes_mhcnuggets_class_2.nf b/modules/local/postprocess_neoepitopes_mhcnuggets_class_2.nf index 502928d1..22442cf8 100644 --- a/modules/local/postprocess_neoepitopes_mhcnuggets_class_2.nf +++ b/modules/local/postprocess_neoepitopes_mhcnuggets_class_2.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -14,7 +14,7 @@ process POSTPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } - conda (params.enable_conda ? 
"bioconda::mhcnuggets=2.3.2--py_0" : null) + conda (params.enable_conda ? "bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mhcnuggets:2.3.2--py_0" } else { @@ -27,12 +27,16 @@ process POSTPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2 { output: tuple val(meta), path("*.csv"), emit: csv - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: """ postprocess_neoepitopes_mhcnuggets.py --input ${predicted} --neoepitopes ${neoepitopes} - echo $VERSION > mhcnuggets.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/local/postprocess_peptides_mhcnuggets_class_2.nf b/modules/local/postprocess_peptides_mhcnuggets_class_2.nf index 2c817907..4fb76980 100644 --- a/modules/local/postprocess_peptides_mhcnuggets_class_2.nf +++ b/modules/local/postprocess_peptides_mhcnuggets_class_2.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -14,7 +14,7 @@ process POSTPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } - conda (params.enable_conda ? "bioconda::mhcnuggets=2.3.2--py_0" : null) + conda (params.enable_conda ? 
"bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mhcnuggets:2.3.2--py_0" } else { @@ -26,13 +26,17 @@ process POSTPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2 { output: tuple val(meta), path('*.csv'), emit: csv - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? "${meta.sample}_${options.suffix}" : "${meta.sample}_postprocessed" """ postprocess_peptides_mhcnuggets.py --input ${peptides} --peptides_seq_ID ${peptide_to_geneID} --output ${prefix}.csv - echo $VERSION > mhcnuggets.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/local/predict_neoepitopes_mhcflurry_class_1.nf b/modules/local/predict_neoepitopes_mhcflurry_class_1.nf index 1bebedf7..071b7008 100644 --- a/modules/local/predict_neoepitopes_mhcflurry_class_1.nf +++ b/modules/local/predict_neoepitopes_mhcflurry_class_1.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -12,11 +12,11 @@ process PREDICT_NEOEPITOPES_MHCFLURRY_CLASS_1 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_1_bindings', publish_id:'class_1_bindings') } - conda (params.enable_conda ? "bioconda::mhcflurry=1.4.3--py_0" : null) + conda (params.enable_conda ? 
"bioconda::mhcflurry=2.0.1" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mhcflurry:1.4.3--py_0" + container "https://depot.galaxyproject.org/singularity/mhcflurry:2.0.1--pyh864c0ab_0" } else { - container "quay.io/biocontainers/mhcflurry:1.4.3--py_0" + container "quay.io/biocontainers/mhcflurry:2.0.1--pyh864c0ab_0" } @@ -25,7 +25,7 @@ process PREDICT_NEOEPITOPES_MHCFLURRY_CLASS_1 { output: tuple val(meta), path("*.csv"), emit: csv - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? "${neoepitopes}_${meta}_${options.suffix}" : "${neoepitopes}_${meta}_predicted_neoepitopes_class_1" @@ -33,6 +33,10 @@ process PREDICT_NEOEPITOPES_MHCFLURRY_CLASS_1 { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_neoepitope_binding_prediction.py '${allotypes}' ${prefix}.csv - mhcflurry-predict --version &> mhcflurry.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + END_VERSIONS """ } diff --git a/modules/local/predict_neoepitopes_mhcnuggets_class_2.nf b/modules/local/predict_neoepitopes_mhcnuggets_class_2.nf index ed60d928..33baad40 100644 --- a/modules/local/predict_neoepitopes_mhcnuggets_class_2.nf +++ b/modules/local/predict_neoepitopes_mhcnuggets_class_2.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -22,13 +22,17 @@ process PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2 { output: tuple val(meta), path("*_predicted_neoepitopes_class_2"), emit: csv - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_predicted_neoepitopes_class_2" """ mhcnuggets_predict_peptides.py --peptides ${neoepitopes} --alleles '${alleles}' --output ${prefix} - echo $VERSION > mhcnuggets.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/local/predict_peptides_mhcflurry_class_1.nf b/modules/local/predict_peptides_mhcflurry_class_1.nf index 04b9821a..deaed97a 100644 --- a/modules/local/predict_peptides_mhcflurry_class_1.nf +++ b/modules/local/predict_peptides_mhcflurry_class_1.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,7 +27,7 @@ process PREDICT_PEPTIDES_MHCFLURRY_CLASS_1 { output: tuple val(meta), path("*predicted_peptides_class_1.csv"), emit: csv - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? 
"${meta.id}_${options.suffix}" : "${meta.id}_predicted_peptides_class_1" @@ -35,8 +35,12 @@ process PREDICT_PEPTIDES_MHCFLURRY_CLASS_1 { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_predict_mztab.py '${alleles}' ${mztab} ${prefix}.csv - echo $VERSIONFRED2 > fred2.version.txt - echo $VERSIONMHCNUGGETS > mhcnuggets.version.txt - mhcflurry-predict --version &> mhcflurry.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + mhcnuggets: \$(echo $VERSIONMHCNUGGETS) + FRED2: \$(echo $VERSIONFRED2) + END_VERSIONS """ } diff --git a/modules/local/predict_peptides_mhcnuggets_class_2.nf b/modules/local/predict_peptides_mhcnuggets_class_2.nf index 4fb51a6e..f33931f3 100644 --- a/modules/local/predict_peptides_mhcnuggets_class_2.nf +++ b/modules/local/predict_peptides_mhcnuggets_class_2.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -23,14 +23,18 @@ process PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2 { output: tuple val(meta), path("*_predicted_peptides_class_2"), emit: csv - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}_predicted_peptides_class_2" """ mhcnuggets_predict_peptides.py --peptides ${peptides} --alleles '${alleles}' --output ${prefix} - echo $VERSIONFRED2 > fred2.version.txt - echo $VERSIONMHCNUGGETS > mhcnuggets.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo $VERSIONMHCNUGGETS) + FRED2: \$(echo $VERSIONFRED2) + END_VERSIONS """ } diff --git a/modules/local/predict_possible_class_2_neoepitopes.nf b/modules/local/predict_possible_class_2_neoepitopes.nf index 85eddb38..05fe85e6 100644 --- a/modules/local/predict_possible_class_2_neoepitopes.nf +++ b/modules/local/predict_possible_class_2_neoepitopes.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,15 +28,19 @@ process PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES { output: tuple val(meta), path("*.csv"), emit: csv tuple val(meta), path("${prefix}.txt"), emit: txt - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_vcf_neoepitopes_class2" """ vcf_neoepitope_predictor.py -t ${params.variant_annotation_style} -r ${params.variant_reference} -a '${alleles}' -minl ${params.peptide_min_length} -maxl ${params.peptide_max_length} -v ${vcf} -o ${prefix}.csv - echo $VERSIONFRED2 > fred2.version.txt - echo $VERSIONMHCNUGGETS > mhcnuggets.version.txt - mhcflurry-predict --version &> mhcflurry.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + mhcnuggets: \$(echo $VERSIONMHCNUGGETS) + FRED2: \$(echo $VERSIONFRED2) + END_VERSIONS """ } diff --git a/modules/local/predict_possible_neoepitopes.nf b/modules/local/predict_possible_neoepitopes.nf index 07b5adf7..5c3ac414 100644 --- a/modules/local/predict_possible_neoepitopes.nf +++ b/modules/local/predict_possible_neoepitopes.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -28,15 +28,19 @@ process PREDICT_POSSIBLE_NEOEPITOPES { output: tuple val(meta), path("${prefix}.csv"), emit: csv tuple val(meta), path("${prefix}.txt"), emit: txt - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_vcf_neoepitopes_class1" """ vcf_neoepitope_predictor.py -t ${params.variant_annotation_style} -r ${params.variant_reference} -a '${alleles}' -minl ${params.peptide_min_length} -maxl ${params.peptide_max_length} -v ${vcf} -o ${prefix}.csv - echo $VERSIONFRED2 > fred2.version.txt - echo $VERSIONMHCNUGGETS > mhcnuggets.version.txt - mhcflurry-predict --version &> mhcflurry.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + mhcnuggets: \$(echo $VERSIONMHCNUGGETS) + FRED2: \$(echo $VERSIONFRED2) + END_VERSIONS """ } diff --git a/modules/local/predict_psms.nf b/modules/local/predict_psms.nf index 6b95acec..bfa144c7 100644 --- a/modules/local/predict_psms.nf +++ b/modules/local/predict_psms.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -12,11 +12,11 @@ process PREDICT_PSMS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'Intermediate_Results', publish_id:'Intermediate_Results') } - conda (params.enable_conda ? "bioconda::fred2=2.0.6 bioconda::mhcflurry=1.4.3 bioconda::mhcnuggets=2.3.2" : null) + conda (params.enable_conda ? 
"bioconda::mhcflurry=2.0.1" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-689ae0756dd82c61400782baaa8a7a1c2289930d:a9e10ca22d4cbcabf6b54f0fb5d766ea16bb171e-0" + container "https://depot.galaxyproject.org/singularity/mhcflurry:2.0.1--pyh864c0ab_0" } else { - container "quay.io/biocontainers/mulled-v2-689ae0756dd82c61400782baaa8a7a1c2289930d:a9e10ca22d4cbcabf6b54f0fb5d766ea16bb171e-0" + container "quay.io/biocontainers/mhcflurry:2.0.1--pyh864c0ab_0" } input: @@ -24,7 +24,7 @@ process PREDICT_PSMS { output: tuple val(meta), path("*.idXML"), emit: idxml - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? "${meta.id}_${options.suffix}" : "${meta.id}_peptide_filter" @@ -32,7 +32,11 @@ process PREDICT_PSMS { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_predict_mztab_for_filtering.py ${params.subset_affinity_threshold} '${allotypes}' ${perc_mztab} ${psm_mztab} ${prefix}.idXML - mhcflurry-predict --version &> mhcflurry.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + END_VERSIONS """ } diff --git a/modules/local/preprocess_neoepitopes_mhcnuggets_class_2.nf b/modules/local/preprocess_neoepitopes_mhcnuggets_class_2.nf index e770d025..96aa2125 100644 --- a/modules/local/preprocess_neoepitopes_mhcnuggets_class_2.nf +++ b/modules/local/preprocess_neoepitopes_mhcnuggets_class_2.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -10,7 +10,7 @@ process PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2 { tag "$meta" label 'process_low' - conda 
(params.enable_conda ? "bioconda::mhcnuggets=2.3.2--py_0" : null) + conda (params.enable_conda ? "bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mhcnuggets:2.3.2--py_0" } else { @@ -22,13 +22,19 @@ process PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2 { output: tuple val(meta), path("*${prefix}*"), emit: preprocessed - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? "${meta}_${options.suffix}" : "${meta}_mhcnuggets_preprocessed" """ preprocess_neoepitopes_mhcnuggets.py --neoepitopes ${neoepitopes} --output ${prefix} - echo $VERSION > mhcnuggets.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo $VERSION) + END_VERSIONS """ } + +// ${getSoftwareName(task.process)}: \$(echo $VERSION) diff --git a/modules/local/preprocess_peptides_mhcnuggets_class_2.nf b/modules/local/preprocess_peptides_mhcnuggets_class_2.nf index 945337de..1bbc52df 100644 --- a/modules/local/preprocess_peptides_mhcnuggets_class_2.nf +++ b/modules/local/preprocess_peptides_mhcnuggets_class_2.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -10,7 +10,7 @@ process PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2 { tag "$meta" label 'process_low' - conda (params.enable_conda ? "bioconda::mhcnuggets=2.3.2--py_0" : null) + conda (params.enable_conda ? 
"bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mhcnuggets:2.3.2--py_0" } else { @@ -23,13 +23,17 @@ process PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2 { output: tuple val(meta), path("*_preprocessed_mhcnuggets_peptides"), emit: preprocessed tuple val(meta), path('*peptide_to_geneID*'), emit: geneID - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? "${meta.sample}_${options.suffix}" : "${meta.sample}_preprocessed_mhcnuggets_peptides" """ preprocess_peptides_mhcnuggets.py --mztab ${mztab} --output ${prefix} - echo $VERSION > mhcnuggets.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo $VERSION) + END_VERSIONS """ } diff --git a/modules/local/resolve_found_class_2_neoepitopes.nf b/modules/local/resolve_found_class_2_neoepitopes.nf index caad6e93..0aec6214 100644 --- a/modules/local/resolve_found_class_2_neoepitopes.nf +++ b/modules/local/resolve_found_class_2_neoepitopes.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -29,15 +29,19 @@ process RESOLVE_FOUND_CLASS_2_NEOEPITOPES { output: tuple val(meta), path("*.csv"), emit: csv - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_found_neoepitopes_class_2" """ resolve_neoepitopes.py -n ${neoepitopes} -m ${mztab} -f csv -o ${prefix} - echo $VERSIONFRED2 > fred2.version.txt - echo $VERSIONMHCNUGGETS > mhcnuggets.version.txt - mhcflurry-predict --version &> mhcflurry.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + mhcnuggets: \$(echo $VERSIONMHCNUGGETS) + FRED2: \$(echo $VERSIONFRED2) + END_VERSIONS """ } diff --git a/modules/local/resolve_found_neoepitopes.nf b/modules/local/resolve_found_neoepitopes.nf index 85ad65bf..2ee16c1b 100644 --- a/modules/local/resolve_found_neoepitopes.nf +++ b/modules/local/resolve_found_neoepitopes.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -27,15 +27,19 @@ process RESOLVE_FOUND_NEOEPITOPES { output: tuple val(meta), path("*.csv"), emit: csv - path "*.version.txt", emit: version + path "versions.yml", emit: versions script: def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_found_neoepitopes_class_1" """ resolve_neoepitopes.py -n ${neoepitopes} -m ${mztab} -f csv -o ${prefix} - echo $VERSIONFRED2 > fred2.version.txt - echo $VERSIONMHCNUGGETS > mhcnuggets.version.txt - mhcflurry-predict --version &> mhcflurry.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + mhcnuggets: \$(echo $VERSIONMHCNUGGETS) + FRED2: \$(echo $VERSIONFRED2) + END_VERSIONS """ } diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index 476f8c81..a67ffddb 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles } from './functions' +include { initOptions; saveFiles; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -7,44 +7,41 @@ options = initOptions(params.options) /* * Reformat design file and check validity */ + process SAMPLESHEET_CHECK { tag "$samplesheet" - label 'process_low' - publishDir "${params.outdir}", mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'pipeline_info', publish_id:'') } + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'pipeline_info', meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "conda-forge::python=3.8.3" : null) - container "quay.io/biocontainers/python:3.8.3" + conda (params.enable_conda ? 
"conda-forge::python=3.8.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/python:3.8.3" + } else { + container "quay.io/biocontainers/python:3.8.3" + } input: path samplesheet output: - path '*.csv' + path '*.csv' , emit: csv + path "versions.yml", emit: versions + script: // This script is bundled with the pipeline, in nf-core/rnaseq/bin/ + """ + check_samplesheet.py \\ + $samplesheet \\ + samplesheet.valid.csv + + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + python: \$(python --version | sed 's/Python //g') + END_VERSIONS - script: - """ - check_samplesheet.py $samplesheet samplesheet.valid.csv """ } -// Function to get list of [ meta, filenames ] -def get_samplesheet_paths(LinkedHashMap row) { - def meta = [:] - meta.id = row.ID - meta.sample = row.Sample - meta.condition = row.Condition - meta.ext = row.FileExt - - def array = [] - if (!file(row.Filename).exists()) { - exit 1, "ERROR: Please check input samplesheet -> MS file does not exist!\n${row.Filename}" - } else { - array = [ meta, file(row.Filename) ] - } - return array -} + // echo Python: \$(python --version) | sed 's/Python //g' &> versions.yml diff --git a/modules/local/subworkflow/input_check.nf b/modules/local/subworkflow/input_check.nf deleted file mode 100644 index ab1c3846..00000000 --- a/modules/local/subworkflow/input_check.nf +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Check input samplesheet and get read channels - */ - -params.options = [:] - -include { - SAMPLESHEET_CHECK; - get_samplesheet_paths } from '../samplesheet_check' addParams( options: params.options ) - -workflow INPUT_CHECK { - take: - samplesheet // file: /path/to/samplesheet.csv - - main: - SAMPLESHEET_CHECK ( samplesheet ) - .splitCsv ( header:true, sep:'\t' ) - .map { get_samplesheet_paths(it) } - .set { reads } - - emit: - reads // channel: [ val(meta), [ reads ] ] -} diff --git 
a/modules/nf-core/modules/custom/dumpsoftwareversions/functions.nf b/modules/nf-core/modules/custom/dumpsoftwareversions/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/nf-core/modules/custom/dumpsoftwareversions/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf b/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf new file mode 100644 index 00000000..faf2073f --- /dev/null +++ b/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf @@ -0,0 +1,106 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CUSTOM_DUMPSOFTWAREVERSIONS { + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'pipeline_info', meta:[:], publish_by_meta:[]) } + + // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container + conda (params.enable_conda ? 
"bioconda::multiqc=1.11" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0" + } + + input: + path versions + + output: + path "software_versions.yml" , emit: yml + path "software_versions_mqc.yml", emit: mqc_yml + path "versions.yml" , emit: versions + + script: + """ + #!/usr/bin/env python + + import yaml + import platform + from textwrap import dedent + + def _make_versions_html(versions): + html = [ + dedent( + '''\\ + + + + + + + + + + ''' + ) + ] + for process, tmp_versions in sorted(versions.items()): + html.append("") + for i, (tool, version) in enumerate(sorted(tmp_versions.items())): + html.append( + dedent( + f'''\\ + + + + + + ''' + ) + ) + html.append("") + html.append("
Process Name Software Version
{process if (i == 0) else ''}{tool}{version}
") + return "\\n".join(html) + + module_versions = {} + module_versions["${getProcessName(task.process)}"] = { + 'python': platform.python_version(), + 'yaml': yaml.__version__ + } + + with open("$versions") as f: + workflow_versions = yaml.load(f, Loader=yaml.BaseLoader) | module_versions + + workflow_versions["Workflow"] = { + "Nextflow": "$workflow.nextflow.version", + "$workflow.manifest.name": "$workflow.manifest.version" + } + + versions_mqc = { + 'id': 'software_versions', + 'section_name': '${workflow.manifest.name} Software Versions', + 'section_href': 'https://github.com/${workflow.manifest.name}', + 'plot_type': 'html', + 'description': 'are collected at run time from the software output.', + 'data': _make_versions_html(workflow_versions) + } + + with open("software_versions.yml", 'w') as f: + yaml.dump(workflow_versions, f, default_flow_style=False) + with open("software_versions_mqc.yml", 'w') as f: + yaml.dump(versions_mqc, f, default_flow_style=False) + + with open('versions.yml', 'w') as f: + yaml.dump(module_versions, f, default_flow_style=False) + """ +} diff --git a/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml b/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml new file mode 100644 index 00000000..8d4a6ed4 --- /dev/null +++ b/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml @@ -0,0 +1,33 @@ +name: custom_dumpsoftwareversions +description: Custom module used to dump software versions within the nf-core pipeline template +keywords: + - custom + - version +tools: + - custom: + description: Custom module used to dump software versions within the nf-core pipeline template + homepage: https://github.com/nf-core/tools + documentation: https://github.com/nf-core/tools + +input: + - versions: + type: file + description: YML file containing software versions + pattern: "*.yml" + +output: + - yml: + type: file + description: Standard YML file containing software versions + pattern: "software_versions.yml" + - mqc_yml: 
+ type: file + description: MultiQC custom content YML file containing software versions + pattern: "software_versions_mqc.yml" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@drpatelh" diff --git a/nextflow.config b/nextflow.config index ce73d890..5d63349b 100644 --- a/nextflow.config +++ b/nextflow.config @@ -36,7 +36,7 @@ params { digest_mass_range = "800:2500" enzyme = 'unspecific cleavage' fdr_threshold = 0.01 - fdr_level = 'peptide-level-fdrs' + fdr_level = 'peptide_level_fdrs' fixed_mods = '' fragment_bin_offset = 0 fragment_mass_tolerance = 0.02 @@ -203,7 +203,7 @@ manifest { description = 'Identify and quantify peptides from mass spectrometry raw data' mainScript = 'main.nf' nextflowVersion = '>=21.04.0' - version = '2.0.1dev' + version = '2.1.0dev' } // Function to ensure that resource requirements don't go beyond @@ -237,4 +237,4 @@ def check_max(obj, type) { return obj } } -} \ No newline at end of file +} diff --git a/nextflow_schema.json b/nextflow_schema.json index 58a46ace..ea4489ad 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -210,12 +210,12 @@ "properties": { "fdr_level": { "type": "string", - "default": "peptide-level-fdrs", + "default": "peptide_level_fdrs", "description": "Specify the level at which the false discovery rate should be computed.", "enum": [ - "peptide-level-fdrs", + "peptide_level_fdrs", "psm-level-fdrs", - "protein-level-fdrs" + "protein_level_fdrs" ] }, "fdr_threshold": { @@ -384,6 +384,7 @@ "type": "boolean", "description": "Skip MultiQC.", "fa_icon": "fas fa-fast-forward", + "hidden": true, "default": true }, "email_on_fail": { diff --git a/subworkflow/local/input_check.nf b/subworkflow/local/input_check.nf new file mode 100644 index 00000000..1c3ccc2e --- /dev/null +++ b/subworkflow/local/input_check.nf @@ -0,0 +1,40 @@ +// +// Check input samplesheet and get read channels +// + +params.options = [:] + +include { SAMPLESHEET_CHECK } 
from '../../modules/local/samplesheet_check' addParams(options: [:]) + +workflow INPUT_CHECK { + take: + samplesheet // file: /path/to/samplesheet.csv + + main: + SAMPLESHEET_CHECK ( samplesheet ) + .csv + .splitCsv ( header:true, sep:"\t" ) + .map { get_samplesheet_paths(it) } + .set { reads } + + emit: + reads // channel: [ val(meta), [ reads ] ] + versions = SAMPLESHEET_CHECK.out.versions // channel: [ versions.yml ] +} + +// Function to get list of [ meta, filenames ] +def get_samplesheet_paths(LinkedHashMap row) { + def meta = [:] + meta.id = row.ID + meta.sample = row.Sample + meta.condition = row.Condition + meta.ext = row.FileExt + + def array = [] + if (!file(row.Filename).exists()) { + exit 1, "ERROR: Please check input samplesheet -> MS file does not exist!\n${row.Filename}" + } else { + array = [ meta, file(row.Filename) ] + } + return array +} diff --git a/modules/local/subworkflow/refine_fdr_on_predicted_subset.nf b/subworkflow/local/refine_fdr_on_predicted_subset.nf similarity index 65% rename from modules/local/subworkflow/refine_fdr_on_predicted_subset.nf rename to subworkflow/local/refine_fdr_on_predicted_subset.nf index 1dab5184..dbdee596 100644 --- a/modules/local/subworkflow/refine_fdr_on_predicted_subset.nf +++ b/subworkflow/local/refine_fdr_on_predicted_subset.nf @@ -18,12 +18,12 @@ def filter_refined_qvalue_options = params.filter_options.clone() filter_psms_options.suffix = "pred_filtered" filter_refined_qvalue_options.suffix = "perc_subset_filtered" -include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPERC } from '../openms_mztabexporter' addParams( options: [ suffix: "all_ids_merged_psm_perc_filtered" ] ) -include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPSM } from '../openms_mztabexporter' addParams( options: [ suffix: "all_ids_merged" ] ) -include { PREDICT_PSMS } from '../predict_psms' addParams( options: [:] ) -include { OPENMS_PERCOLATORADAPTER } from '../openms_percolatoradapter' addParams( options: percolator_adapter_options 
) -include { OPENMS_IDFILTER as OPENMS_IDFILTER_PSMS } from '../openms_idfilter' addParams( options: filter_psms_options ) -include { OPENMS_IDFILTER as OPENMS_IDFILTER_REFINED } from '../openms_idfilter' addParams( options: filter_refined_qvalue_options ) +include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPERC } from '../../modules/local/openms_mztabexporter' addParams( options: [ suffix: "all_ids_merged_psm_perc_filtered" ] ) +include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPSM } from '../../modules/local/openms_mztabexporter' addParams( options: [ suffix: "all_ids_merged" ] ) +include { PREDICT_PSMS } from '../../modules/local/predict_psms' addParams( options: [:] ) +include { OPENMS_PERCOLATORADAPTER } from '../../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_options ) +include { OPENMS_IDFILTER as OPENMS_IDFILTER_PSMS } from '../../modules/local/openms_idfilter' addParams( options: filter_psms_options ) +include { OPENMS_IDFILTER as OPENMS_IDFILTER_REFINED } from '../../modules/local/openms_idfilter' addParams( options: filter_refined_qvalue_options ) workflow REFINE_FDR_ON_PREDICTED_SUBSET { // Define the input parameters @@ -36,9 +36,10 @@ workflow REFINE_FDR_ON_PREDICTED_SUBSET { ch_software_versions = Channel.empty() // Export filtered percolator results as mztab OPENMS_MZTABEXPORTERPERC( filtered_perc_output ) - ch_software_versions = ch_software_versions.mix(OPENMS_MZTABEXPORTERPERC.out.version.first().ifEmpty(null)) + ch_versions = ch_versions.mix(OPENMS_MZTABEXPORTERPERC.out.versions) // Export psm results as mztab OPENMS_MZTABEXPORTERPSM( psm_features ) + ch_versions = ch_versions.mix(OPENMS_MZTABEXPORTERPSM.out.versions) // Predict psm results using mhcflurry to shrink search space PREDICT_PSMS( OPENMS_MZTABEXPORTERPERC.out.mztab @@ -47,15 +48,19 @@ workflow REFINE_FDR_ON_PREDICTED_SUBSET { .combine( classI_alleles, by:0) .map(it -> [it[1], it[2], it[3], it[4]]) ) + ch_versions = 
ch_versions.mix(PREDICT_PSMS.out.versions) // Filter psm results by shrinked search space OPENMS_IDFILTER_PSMS(psm_features.combine( PREDICT_PSMS.out.idxml, by: [0] )) + ch_versions = ch_versions.mix(OPENMS_IDFILTER_PSMS.out.versions) // Recompute percolator fdr on shrinked search space OPENMS_PERCOLATORADAPTER( OPENMS_IDFILTER_PSMS.out.idxml ) + ch_versions = ch_versions.mix(OPENMS_PERCOLATORADAPTER.out.versions) // Filter results by refined fdr OPENMS_IDFILTER_REFINED(OPENMS_PERCOLATORADAPTER.out.idxml.flatMap { it -> [tuple(it[0], it[1], null)]}) + ch_versions = ch_versions.mix(OPENMS_IDFILTER_REFINED.out.versions) emit: // Define the information that is returned by this workflow filter_refined_q_value = OPENMS_IDFILTER_REFINED.out.idxml - version = ch_software_versions + versions = ch_software_versions } diff --git a/workflows/mhcquant.nf b/workflows/mhcquant.nf index 62eef1c8..71cae29d 100644 --- a/workflows/mhcquant.nf +++ b/workflows/mhcquant.nf @@ -9,9 +9,11 @@ https://github.com/nf-core/mhcquant ---------------------------------------------------------------------------------------- */ -//////////////////////////////////////////////////// -/* -- VALIDATE INPUTS -- */ -//////////////////////////////////////////////////// +/* +======================================================================================== + VALIDATE INPUTS +======================================================================================== +*/ def summary_params = NfcoreSchema.paramsSummaryMap(workflow, params) // Validate input parameters @@ -71,25 +73,24 @@ rm_precursor = params.remove_precursor_peak ? '-remove_precursor_peak true' : '' fdr_level = (params.fdr_level == 'psm-level-fdrs') ? '' : '-'+params.fdr_level fdr_adj_threshold = (params.fdr_threshold == '0.01') ? 
'0.05' : params.fdr_threshold -//////////////////////////////////////////////////// -/* -- IMPORT LOCAL MODULES/SUBWORKFLOWS -- */ -//////////////////////////////////////////////////// +/* +======================================================================================== + IMPORT LOCAL MODULES/SUBWORKFLOWS +======================================================================================== +*/ +// Don't overwrite global params.modules, create a copy instead and use that within the main script. def modules = params.modules.clone() def openms_map_aligner_identification_options = modules['openms_map_aligner_identification'] def openms_comet_adapter_options = modules['openms_comet_adapter'] - def generate_proteins_from_vcf_options = modules['generate_proteins_from_vcf'] - def percolator_adapter_options = modules['percolator_adapter'] - def id_filter_options = modules['id_filter'] def id_filter_for_alignment_options = id_filter_options.clone() def id_filter_whitelist_options = modules['id_filter_whitelist'] id_filter_options.args += " -score:pep " + params.fdr_threshold id_filter_for_alignment_options.args += " -score:pep " + fdr_adj_threshold - openms_comet_adapter_options.args += x_ions + z_ions + c_ions + a_ions + NL_ions + rm_precursor generate_proteins_from_vcf_options.args += variant_indel_filter + variant_snp_filter + variant_frameshift_filter percolator_adapter_options.args += fdr_level @@ -103,7 +104,7 @@ id_filter_qvalue_options.suffix = "filtered" include { hasExtension } from '../modules/local/functions' -include { INPUT_CHECK } from '../modules/local/subworkflow/input_check' addParams( options: [:] ) +include { INPUT_CHECK } from '../subworkflow/local/input_check' addParams( options: [:] ) include { GENERATE_PROTEINS_FROM_VCF } from '../modules/local/generate_proteins_from_vcf' addParams( options: generate_proteins_from_vcf_options ) include { OPENMS_DECOYDATABASE } from '../modules/local/openms_decoydatabase' addParams( options: [:] ) include { 
OPENMS_THERMORAWFILEPARSER } from '../modules/local/openms_thermorawfileparser' addParams( options: [:] ) @@ -124,7 +125,7 @@ include { OPENMS_PSMFEATUREEXTRACTOR } from '../modules/loc include { OPENMS_PERCOLATORADAPTER } from '../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_options ) include { OPENMS_PERCOLATORADAPTER as OPENMS_PERCOLATORADAPTER_KLAMMER } from '../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_klammer_options ) -include { REFINE_FDR_ON_PREDICTED_SUBSET } from '../modules/local/subworkflow/refine_fdr_on_predicted_subset' addParams( run_percolator_options : percolator_adapter_options, filter_options: id_filter_options, whitelist_filter_options: id_filter_whitelist_options) +include { REFINE_FDR_ON_PREDICTED_SUBSET } from '../subworkflow/local/refine_fdr_on_predicted_subset' addParams( run_percolator_options : percolator_adapter_options, filter_options: id_filter_options, whitelist_filter_options: id_filter_whitelist_options) include { OPENMS_FEATUREFINDERIDENTIFICATION } from '../modules/local/openms_featurefinderidentification' addParams( options: [:] ) include { OPENMS_FEATURELINKERUNLABELEDKD } from '../modules/local/openms_featurelinkerunlabeledkd' addParams( options: [:] ) @@ -149,19 +150,22 @@ include { OPENMS_RTMODEL } from '../modules/loc include { OPENMS_RTPREDICT as OPENMS_RTPREDICT_FOUND_PEPTIDES} from '../modules/local/openms_rtpredict' addParams( options: [suffix:"_id_files_for_rt_prediction_RTpredicted"] ) include { OPENMS_RTPREDICT as OPENMS_RTPREDICT_NEOEPITOPES} from '../modules/local/openms_rtpredict' addParams( options: [suffix:"_txt_file_for_rt_prediction_RTpredicted"] ) -include { GET_SOFTWARE_VERSIONS } from '../modules/local/get_software_versions' addParams( options: [publish_files : ['tsv':'']] ) +include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/modules/custom/dumpsoftwareversions/main' addParams( options: [publish_files : ['_versions.yml':'']] 
) //////////////////////////////////////////////////// /* -- CREATE CHANNELS -- */ //////////////////////////////////////////////////// -params.summary_params = [:] - +// params.summary_params = [:] +include { SAMPLESHEET_CHECK } from '../modules/local/samplesheet_check' addParams( ) //////////////////////////////////////////////////// /* -- RUN MAIN WORKFLOW -- */ //////////////////////////////////////////////////// workflow MHCQUANT { + ch_versions = Channel.empty() + INPUT_CHECK( params.input ) + .reads .set { ch_samples_from_sheet } ch_samples_from_sheet @@ -174,6 +178,8 @@ workflow MHCQUANT { other : true } .set { ms_files } + ch_versions = ch_versions.mix(INPUT_CHECK.out.versions) + // Input fasta file Channel.fromPath( params.fasta ) .combine( ch_samples_from_sheet ) @@ -181,7 +187,6 @@ workflow MHCQUANT { .ifEmpty { exit 1, "params.fasta was empty - no input file supplied" } .set { input_fasta } - ch_software_versions = Channel.empty() // A warning message will be given when the format differs from the '.raw' or '.mzML' extention ms_files.other.subscribe { row -> log.warn("Unknown format for entry " + row[3] + " in provided sample sheet, line will be ignored."); exit 1 } @@ -193,6 +198,7 @@ workflow MHCQUANT { .map(it -> [it[1], it[2], it[3]]) // If specified translate variants to proteins and include in reference fasta GENERATE_PROTEINS_FROM_VCF( ch_vcf ) + // ch_versions = ch_versions.mix(GENERATE_PROTEINS_FROM_VCF.out.versions.first().ifEmpty(null)) ch_fasta_file = GENERATE_PROTEINS_FROM_VCF.out.vcf_fasta } else { ch_fasta_file = input_fasta @@ -201,6 +207,7 @@ workflow MHCQUANT { if (!params.skip_decoy_generation) { // Generate reversed decoy database OPENMS_DECOYDATABASE(ch_fasta_file) + ch_versions = ch_versions.mix(OPENMS_DECOYDATABASE.out.versions.first().ifEmpty(null)) ch_decoy_db = OPENMS_DECOYDATABASE.out.decoy } else { ch_decoy_db = ch_fasta_file @@ -208,11 +215,12 @@ workflow MHCQUANT { // Raw file conversion 
OPENMS_THERMORAWFILEPARSER(ms_files.raw) - ch_software_versions = ch_software_versions.mix(OPENMS_THERMORAWFILEPARSER.out.version.first().ifEmpty(null)) + ch_versions = ch_versions.mix(OPENMS_THERMORAWFILEPARSER.out.versions.first().ifEmpty(null)) if ( params.run_centroidisation ) { // Optional: Run Peak Picking as Preprocessing OPENMS_PEAKPICKERHIRES(ms_files.mzml) + ch_versions = ch_versions.mix(OPENMS_PEAKPICKERHIRES.out.versions.first().ifEmpty(null)) ch_mzml_file = OPENMS_PEAKPICKERHIRES.out.mzml } else { ch_mzml_file = ms_files.mzml @@ -223,18 +231,20 @@ workflow MHCQUANT { OPENMS_THERMORAWFILEPARSER.out.mzml .mix(ch_mzml_file) .join(ch_decoy_db, remainder:true)) - ch_software_versions = ch_software_versions.mix(OPENMS_COMETADAPTER.out.version.first().ifEmpty(null)) + ch_versions = ch_versions.mix(OPENMS_COMETADAPTER.out.versions.first().ifEmpty(null)) // Index decoy and target hits OPENMS_PEPTIDEINDEXER(OPENMS_COMETADAPTER.out.idxml.join(ch_decoy_db)) - ch_software_versions = ch_software_versions.mix(OPENMS_PEPTIDEINDEXER.out.version.first().ifEmpty(null)) + ch_versions = ch_versions.mix(OPENMS_PEPTIDEINDEXER.out.versions.first().ifEmpty(null)) if(!params.skip_quantification) { // Calculate fdr for id based alignment OPENMS_FALSEDISCOVERYRATE(OPENMS_PEPTIDEINDEXER.out.idxml) + ch_versions = ch_versions.mix(OPENMS_FALSEDISCOVERYRATE.out.versions.first().ifEmpty(null)) // Filter fdr for id based alignment OPENMS_IDFILTER_FOR_ALIGNMENT(OPENMS_FALSEDISCOVERYRATE.out.idxml .flatMap { it -> [tuple(it[0], it[1], null)]}) + ch_versions = ch_versions.mix(OPENMS_IDFILTER_FOR_ALIGNMENT.out.versions.first().ifEmpty(null)) ch_grouped_fdr_filtered = OPENMS_IDFILTER_FOR_ALIGNMENT.out.idxml .map { @@ -245,7 +255,7 @@ workflow MHCQUANT { // Compute alignment rt transformatio OPENMS_MAPALIGNERIDENTIFICATION(ch_grouped_fdr_filtered) - + ch_versions = ch_versions.mix(OPENMS_MAPALIGNERIDENTIFICATION.out.versions.first().ifEmpty(null)) // Intermediate step to join RT 
transformation files with mzml and idxml channels ms_files.mzml .mix(OPENMS_THERMORAWFILEPARSER.out.mzml) @@ -273,8 +283,10 @@ workflow MHCQUANT { // Align mzML files using trafoXMLs OPENMS_MAPRTTRANSFORMERMZML(joined_trafos_mzmls) + ch_versions = ch_versions.mix(OPENMS_MAPRTTRANSFORMERMZML.out.versions.first().ifEmpty(null)) // Align unfiltered idXMLfiles using trafoXMLs OPENMS_MAPRTTRANSFORMERIDXML(joined_trafos_ids) + ch_versions = ch_versions.mix(OPENMS_MAPRTTRANSFORMERIDXML.out.versions.first().ifEmpty(null)) ch_proceeding_idx = OPENMS_MAPRTTRANSFORMERIDXML.out.aligned .map { meta, raw -> @@ -293,20 +305,25 @@ workflow MHCQUANT { // Merge aligned idXMLfiles OPENMS_IDMERGER(ch_proceeding_idx) + ch_versions = ch_versions.mix(OPENMS_IDMERGER.out.versions.first().ifEmpty(null)) // Extract PSM features for Percolator OPENMS_PSMFEATUREEXTRACTOR(OPENMS_IDMERGER.out.idxml) + ch_versions = ch_versions.mix(OPENMS_PSMFEATUREEXTRACTOR.out.versions.first().ifEmpty(null)) // Run Percolator if (params.description_correct_features > 0 && params.klammer) { OPENMS_PERCOLATORADAPTER_KLAMMER(OPENMS_PSMFEATUREEXTRACTOR.out.idxml) + ch_versions = ch_versions.mix(OPENMS_PERCOLATORADAPTER_KLAMMER.out.versions.first().ifEmpty(null)) ch_percolator_adapter_outcome = OPENMS_PERCOLATORADAPTER_KLAMMER.out.idxml } else { OPENMS_PERCOLATORADAPTER(OPENMS_PSMFEATUREEXTRACTOR.out.idxml) + ch_versions = ch_versions.mix(OPENMS_PERCOLATORADAPTER.out.versions.first().ifEmpty(null)) ch_percolator_adapter_outcome = OPENMS_PERCOLATORADAPTER.out.idxml } // Filter by percolator q-value OPENMS_IDFILTER_Q_VALUE(ch_percolator_adapter_outcome.flatMap { it -> [tuple(it[0], it[1], null)]}) + ch_versions = ch_versions.mix(OPENMS_IDFILTER_Q_VALUE.out.versions.first().ifEmpty(null)) // Refine_fdr_on_predicted_subset if ( params.refine_fdr_on_predicted_subset && params.predict_class_1 ) { @@ -316,6 +333,7 @@ workflow MHCQUANT { OPENMS_PSMFEATUREEXTRACTOR.out.idxml, peptides_class_1_alleles ) + ch_versions = 
ch_versions.mix(REFINE_FDR_ON_PREDICTED_SUBSET.out.versions.first().ifEmpty(null)) // Define the outcome of the paramer to a fixed variable filter_q_value = REFINE_FDR_ON_PREDICTED_SUBSET.out.filter_refined_q_value.flatMap { it -> [ tuple(it[0].sample, it[0], it[1]) ] } @@ -334,6 +352,7 @@ workflow MHCQUANT { .set{ joined_mzmls_ids_quant } // Quantify identifications using targeted feature extraction OPENMS_FEATUREFINDERIDENTIFICATION(joined_mzmls_ids_quant) + ch_versions = ch_versions.mix(OPENMS_FEATUREFINDERIDENTIFICATION.out.versions.first().ifEmpty(null)) // Link extracted features OPENMS_FEATURELINKERUNLABELEDKD( OPENMS_FEATUREFINDERIDENTIFICATION.out.featurexml @@ -342,8 +361,10 @@ workflow MHCQUANT { [[[id:meta.sample + "_" + meta.condition, sample:meta.sample, condition:meta.condition, ext:meta.ext], raw]] } .groupTuple(by:[0])) + ch_versions = ch_versions.mix(OPENMS_FEATURELINKERUNLABELEDKD.out.versions.first().ifEmpty(null)) // Resolve conflicting ids matching to the same feature OPENMS_IDCONFLICTRESOLVER(OPENMS_FEATURELINKERUNLABELEDKD.out.consensusxml) + ch_versions = ch_versions.mix(OPENMS_IDCONFLICTRESOLVER.out.versions.first().ifEmpty(null)) // Assign the outcome of the id conflict resolver as export content export_content = OPENMS_IDCONFLICTRESOLVER.out.consensusxml } else { @@ -366,11 +387,11 @@ workflow MHCQUANT { .combine( peptides_class_1_alleles, by:0) .map( it -> [it[1], it[2], it[3]]) ) - - ch_software_versions = ch_software_versions.mix(PREDICT_PEPTIDES_MHCFLURRY_CLASS_1.out.version.first().ifEmpty(null)) + ch_versions = ch_versions.mix(PREDICT_PEPTIDES_MHCFLURRY_CLASS_1.out.versions.first().ifEmpty(null)) if ( params.include_proteins_from_vcf ) { // Predict all possible neoepitopes from vcf PREDICT_POSSIBLE_NEOEPITOPES(peptides_class_1_alleles.join(ch_vcf_from_sheet, by:0, remainder:true)) + ch_versions = ch_versions.mix(PREDICT_POSSIBLE_NEOEPITOPES.out.versions.first().ifEmpty(null)) ch_predicted_possible_neoepitopes = 
PREDICT_POSSIBLE_NEOEPITOPES.out.csv // Resolve found neoepitopes RESOLVE_FOUND_NEOEPITOPES( @@ -379,8 +400,10 @@ workflow MHCQUANT { .combine( ch_predicted_possible_neoepitopes, by:0, remainder:true) .map( it -> [it[1], it[2], it[3]]) ) + ch_versions = ch_versions.mix(RESOLVE_FOUND_NEOEPITOPES.out.versions.first().ifEmpty(null)) // Predict class 1 neoepitopes MHCFlurry PREDICT_NEOEPITOPES_MHCFLURRY_CLASS_1(peptides_class_1_alleles.join(RESOLVE_FOUND_NEOEPITOPES.out.csv, by:0)) + ch_versions = ch_versions.mix(PREDICT_NEOEPITOPES_MHCFLURRY_CLASS_1.out.versions.first().ifEmpty(null)) } } @@ -388,6 +411,7 @@ workflow MHCQUANT { if ( params.predict_class_2 & !params.skip_quantification ) { // Preprocess found peptides for MHCNuggets prediction class 2 PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2(OPENMS_MZTABEXPORTER.out.mztab) + ch_versions = ch_versions.mix(PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) // Predict found peptides using MHCNuggets class 2 PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2( PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2.out.preprocessed @@ -395,12 +419,15 @@ workflow MHCQUANT { .join(peptides_class_2_alleles, by:0) .map( it -> [it[1], it[2], it[3]]) ) + ch_versions = ch_versions.mix(PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) // Postprocess predicted MHCNuggets peptides class 2 POSTPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2( PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2.out.csv.join(PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2.out.geneID, by:0) ) + ch_versions = ch_versions.mix(POSTPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) if ( params.include_proteins_from_vcf ) { // Predict all possible class 2 neoepitopes from vcf PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES(peptides_class_2_alleles.join(ch_vcf_from_sheet, by:0, remainder:true)) + ch_versions = ch_versions.mix(PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES.out.versions.first().ifEmpty(null)) ch_predicted_possible_neoepitopes_II = 
PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES.out.csv // Resolve found class 2 neoepitopes RESOLVE_FOUND_CLASS_2_NEOEPITOPES( @@ -408,20 +435,16 @@ workflow MHCQUANT { .map{ it -> [it[0].sample, it[1]] } .combine( ch_predicted_possible_neoepitopes_II, by:0, remainder:true) ) + ch_versions = ch_versions.mix(RESOLVE_FOUND_CLASS_2_NEOEPITOPES.out.versions.first().ifEmpty(null)) // Preprocess resolved neoepitopes in a format that MHCNuggets understands PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2(RESOLVE_FOUND_CLASS_2_NEOEPITOPES.out.csv) + ch_versions = ch_versions.mix(PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) // Predict class 2 MHCNuggets PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2(PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.preprocessed.join(peptides_class_2_alleles, by:0)) + ch_versions = ch_versions.mix(PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) // Class 2 MHCNuggets Postprocessing POSTPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2(RESOLVE_FOUND_CLASS_2_NEOEPITOPES.out.csv.join(PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.csv, by:0)) - // If there was no prediction performed on class 1 - if ( !params.predict_class_1 ) { - // Add information to software versions - ch_software_versions = ch_software_versions.mix(PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES.out.version.ifEmpty(null)) - } - } else if (!params.include_proteins_from_vcf && !params.predict_class_1) { - // Add the information to software versions - ch_software_versions = ch_software_versions.mix(PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2.out.version.first().ifEmpty(null)) + ch_versions = ch_versions.mix(POSTPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) } } ////////////////////////////////////////////////////////////////////////////////////////////// @@ -429,26 +452,23 @@ workflow MHCQUANT { filter_q_value = filter_q_value.map{ it -> [it[1], it[2]] } // Train Retention Times Predictor OPENMS_RTMODEL(filter_q_value) + ch_versions = 
ch_versions.mix(OPENMS_RTMODEL.out.versions.first().ifEmpty(null)) // Retention Times Predictor Found Peptides OPENMS_RTPREDICT_FOUND_PEPTIDES(filter_q_value.join(OPENMS_RTMODEL.out.complete, by:[0])) + ch_versions = ch_versions.mix(OPENMS_RTPREDICT_FOUND_PEPTIDES.out.versions.first().ifEmpty(null)) // Retention Times Predictor possible Neoepitopes OPENMS_RTPREDICT_NEOEPITOPES(ch_predicted_possible_neoepitopes.mix(ch_predicted_possible_neoepitopes_II).join(OPENMS_RTMODEL.out.complete, by:[0])) + ch_versions = ch_versions.mix(OPENMS_RTPREDICT_FOUND_PEPTIDES.out.versions.first().ifEmpty(null)) } - /* - * MODULE: Pipeline reporting - */ - ch_software_versions - .map { it -> if (it) [ it.baseName, it ] } - .groupTuple() - .map { it[1][0] } - .flatten() - .collect() - .set { ch_software_versions } - - GET_SOFTWARE_VERSIONS ( - ch_software_versions.map { it }.collect() + // + // MODULE: Pipeline reporting + // + + CUSTOM_DUMPSOFTWAREVERSIONS ( + ch_versions.unique().collectFile() ) + } //////////////////////////////////////////////////// From 190061e404c9d35426c7634bef933cf150564929 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 25 Oct 2021 18:04:33 +0200 Subject: [PATCH 008/227] Perform linting --- .markdownlint.yml | 2 -- lib/NfcoreTemplate.groovy | 22 +++++++++++----------- 2 files changed, 11 insertions(+), 13 deletions(-) diff --git a/.markdownlint.yml b/.markdownlint.yml index e7fc97a7..9e605fcf 100644 --- a/.markdownlint.yml +++ b/.markdownlint.yml @@ -12,5 +12,3 @@ no-inline-html: - kbd - details - summary -single-title: - level: 2 diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy index cd64d8b7..44551e0a 100755 --- a/lib/NfcoreTemplate.groovy +++ b/lib/NfcoreTemplate.groovy @@ -72,17 +72,17 @@ class NfcoreTemplate { misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp def email_fields = [:] - email_fields['version'] = workflow.manifest.version - email_fields['runName'] = workflow.runName - email_fields['success'] = 
workflow.success - email_fields['dateComplete'] = workflow.complete - email_fields['duration'] = workflow.duration - email_fields['exitStatus'] = workflow.exitStatus - email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') - email_fields['errorReport'] = (workflow.errorReport ?: 'None') - email_fields['commandLine'] = workflow.commandLine - email_fields['projectDir'] = workflow.projectDir - email_fields['summary'] = summary << misc_fields + email_fields['version'] = workflow.manifest.version + email_fields['runName'] = workflow.runName + email_fields['success'] = workflow.success + email_fields['dateComplete'] = workflow.complete + email_fields['duration'] = workflow.duration + email_fields['exitStatus'] = workflow.exitStatus + email_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + email_fields['errorReport'] = (workflow.errorReport ?: 'None') + email_fields['commandLine'] = workflow.commandLine + email_fields['projectDir'] = workflow.projectDir + email_fields['summary'] = summary << misc_fields // On success try attach the multiqc report def mqc_report = null From ef73709367d4b59c0cd41ac4824949d21c6df111 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 25 Oct 2021 18:10:22 +0200 Subject: [PATCH 009/227] Format the changelog --- CHANGELOG.md | 118 ++++++++++++++++++++++++++------------------------- 1 file changed, 60 insertions(+), 58 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3ce21ccc..3b9c4c0e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,27 +3,29 @@ ## v2.1.0 nf-core/mhcquant "Olive Tin Hamster" - 2021/10/25 ### `Added` -* Inclusion of assets/schema_input.json + +- Inclusion of assets/schema_input.json ### `Fixed` -* Fixed typos -* [#165] - Raise memory requirements of FeatureFinderIdentification step -* [#176] - Pipeline crashes when setting the --skip_quantification flag + +- Fixed typos +- [#165] - Raise memory requirements of FeatureFinderIdentification step +- [#176] - Pipeline crashes when setting the 
--skip_quantification flag + ### `Dependencies` Note, since the pipeline is now using Nextflow DSL2, each process will be run with its own [Biocontainer](https://biocontainers.pro/#/registry). This means that on occasion it is entirely possible for the pipeline to be using different versions of the same tool. However, the overall software dependency changes compared to the last release have been listed below for reference. - | Dependency | Old version | New version | -|-----------------------|-------------|-------------| +| --------------------- | ----------- | ----------- | | `openms` | 2.5.0 | 2.6.0 | | `openms-thirdparty` | 2.5.0 | 2.6.0 | | `thermorawfileparser` | 1.2.3 | 1.3.4 | | `mhcflurry` | 1.4.3 | 2.0.1 | -> **NB:** Dependency has been __updated__ if both old and new version information is present. -> **NB:** Dependency has been __added__ if just the new version information is present. -> **NB:** Dependency has been __removed__ if version information isn't present. +> **NB:** Dependency has been **updated** if both old and new version information is present. +> **NB:** Dependency has been **added** if just the new version information is present. +> **NB:** Dependency has been **removed** if version information isn't present. 
### `Deprecated` @@ -49,14 +51,14 @@ DSL1 to DSL2 conversion ### `Added` -- Template raise to 1.10.2 -- Added parameter json schema -- Added full size AWS test profile -- Included new parameters for Neutral loss and precursor ion inclusion +- Template raise to 1.10.2 +- Added parameter json schema +- Added full size AWS test profile +- Included new parameters for Neutral loss and precursor ion inclusion ### `Fixed` -- Changed trigger for AWS tests +- Changed trigger for AWS tests ### `Dependencies` @@ -68,8 +70,8 @@ DSL1 to DSL2 conversion ### `Fixed` -- set optimal config for cluster execution -- fix duplication of ids / mixing of channels +- set optimal config for cluster execution +- fix duplication of ids / mixing of channels ### `Dependencies` @@ -79,15 +81,15 @@ DSL1 to DSL2 conversion ### `Added` -- integrate sample, allele and vcf sheets instead of file dirs -- branched mzML/raw input -- introduce param to skip quantification +- integrate sample, allele and vcf sheets instead of file dirs +- branched mzML/raw input +- introduce param to skip quantification ### `Fixed` -- raise OpenMS version to 2.5 -- adapt workflow accoringly with new options -- remove specifying input as file dirs eg "data/*.mzML" +- raise OpenMS version to 2.5 +- adapt workflow accoringly with new options +- remove specifying input as file dirs eg "data/\*.mzML" ### `Dependencies` @@ -97,17 +99,17 @@ DSL1 to DSL2 conversion ### `Added` -- Raw File Reading -- RT prediction -- Quantification FDR -- Variant pass filter -- nf-core template update 1.8 and 1.9 -- Added version numbers of mhcnuggets and Fred2 +- Raw File Reading +- RT prediction +- Quantification FDR +- Variant pass filter +- nf-core template update 1.8 and 1.9 +- Added version numbers of mhcnuggets and Fred2 ### `Fixed` -- output file order in intermediate results -- increased run times for MS search and variant translation +- output file order in intermediate results +- increased run times for MS search and variant 
translation ### `Dependencies` @@ -117,15 +119,15 @@ DSL1 to DSL2 conversion ### `Added` -- nf-core template update -- x,z,a,c ions -- quantification fdri +- nf-core template update +- x,z,a,c ions +- quantification fdri ### `Fixed` -- empty neoepitope list bugs fixed -- documentation -- scrape version numbers +- empty neoepitope list bugs fixed +- documentation +- scrape version numbers ### `Dependencies` @@ -135,9 +137,9 @@ DSL1 to DSL2 conversion ### `Added` -- MHCnugget predictor -- Few fixes -- RT features for percolator +- MHCnugget predictor +- Few fixes +- RT features for percolator ### `Fixed` @@ -151,8 +153,8 @@ DSL1 to DSL2 conversion ### `Fixed` -- linear retention time alignment -- refine_fdr README +- linear retention time alignment +- refine_fdr README ### `Dependencies` @@ -164,8 +166,8 @@ DSL1 to DSL2 conversion ### `Fixed` -- sort channels by basename -- fixed psm-level-fdrs +- sort channels by basename +- fixed psm-level-fdrs ### `Dependencies` @@ -177,7 +179,7 @@ DSL1 to DSL2 conversion ### `Fixed` -- fixed refine_fdr_on_predicted_subset float error +- fixed refine_fdr_on_predicted_subset float error ### `Dependencies` @@ -187,7 +189,7 @@ DSL1 to DSL2 conversion ### `Fixed` -- filter out uncommon aminoacids U,X,B,J,Z +- filter out uncommon aminoacids U,X,B,J,Z ## v1.2.2 nf-core/mhcquant "Golden Eagle" - 2019/01/24 @@ -195,9 +197,9 @@ DSL1 to DSL2 conversion ### `Fixed` -- default params to false -- change on centroidisation parameter -- small changes on docu +- default params to false +- change on centroidisation parameter +- small changes on docu ### `Dependencies` @@ -209,7 +211,7 @@ DSL1 to DSL2 conversion ### `Fixed` -- process identical names bug +- process identical names bug ### `Dependencies` @@ -219,13 +221,13 @@ DSL1 to DSL2 conversion ### `Added` -- Subset FDR refinement option -- Fred2 dependency -- vcf parser and translation to proteins +- Subset FDR refinement option +- Fred2 dependency +- vcf parser and translation to proteins ### 
`Fixed` -- Documentation +- Documentation ### `Dependencies` @@ -235,13 +237,13 @@ DSL1 to DSL2 conversion ### `Added` -- optional mhcflurry binding predictions -- peak picking as optional preprocessing step +- optional mhcflurry binding predictions +- peak picking as optional preprocessing step ### `Fixed` -- adapted a few parameters such as the default fdr threshold -- updated documentation +- adapted a few parameters such as the default fdr threshold +- updated documentation ### `Dependencies` @@ -249,4 +251,4 @@ DSL1 to DSL2 conversion ## v1.0.0 nf-core/mhcquant "Naked Chicken" - 2018/11/27 -- Initial release of nf-core/mhcquant, created with the [nf-core](http://nf-co.re/) template. +- Initial release of nf-core/mhcquant, created with the [nf-core](http://nf-co.re/) template. From 06344cdbf1732e53e213371ae459c3d04d4447a3 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 25 Oct 2021 18:15:16 +0200 Subject: [PATCH 010/227] Format the changelog --- CHANGELOG.md | 106 +++++++++++++++++++++++++-------------------------- 1 file changed, 53 insertions(+), 53 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3b9c4c0e..7e494c5f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,13 +4,13 @@ ### `Added` -- Inclusion of assets/schema_input.json +- Inclusion of assets/schema_input.json ### `Fixed` -- Fixed typos -- [#165] - Raise memory requirements of FeatureFinderIdentification step -- [#176] - Pipeline crashes when setting the --skip_quantification flag +- Fixed typos +- [#165] - Raise memory requirements of FeatureFinderIdentification step +- [#176] - Pipeline crashes when setting the --skip_quantification flag ### `Dependencies` @@ -51,14 +51,14 @@ DSL1 to DSL2 conversion ### `Added` -- Template raise to 1.10.2 -- Added parameter json schema -- Added full size AWS test profile -- Included new parameters for Neutral loss and precursor ion inclusion +- Template raise to 1.10.2 +- Added parameter json schema +- Added full size AWS test profile +- 
Included new parameters for Neutral loss and precursor ion inclusion ### `Fixed` -- Changed trigger for AWS tests +- Changed trigger for AWS tests ### `Dependencies` @@ -70,8 +70,8 @@ DSL1 to DSL2 conversion ### `Fixed` -- set optimal config for cluster execution -- fix duplication of ids / mixing of channels +- set optimal config for cluster execution +- fix duplication of ids / mixing of channels ### `Dependencies` @@ -81,15 +81,15 @@ DSL1 to DSL2 conversion ### `Added` -- integrate sample, allele and vcf sheets instead of file dirs -- branched mzML/raw input -- introduce param to skip quantification +- integrate sample, allele and vcf sheets instead of file dirs +- branched mzML/raw input +- introduce param to skip quantification ### `Fixed` -- raise OpenMS version to 2.5 -- adapt workflow accoringly with new options -- remove specifying input as file dirs eg "data/\*.mzML" +- raise OpenMS version to 2.5 +- adapt workflow accoringly with new options +- remove specifying input as file dirs eg "data/\*.mzML" ### `Dependencies` @@ -99,17 +99,17 @@ DSL1 to DSL2 conversion ### `Added` -- Raw File Reading -- RT prediction -- Quantification FDR -- Variant pass filter -- nf-core template update 1.8 and 1.9 -- Added version numbers of mhcnuggets and Fred2 +- Raw File Reading +- RT prediction +- Quantification FDR +- Variant pass filter +- nf-core template update 1.8 and 1.9 +- Added version numbers of mhcnuggets and Fred2 ### `Fixed` -- output file order in intermediate results -- increased run times for MS search and variant translation +- output file order in intermediate results +- increased run times for MS search and variant translation ### `Dependencies` @@ -119,15 +119,15 @@ DSL1 to DSL2 conversion ### `Added` -- nf-core template update -- x,z,a,c ions -- quantification fdri +- nf-core template update +- x,z,a,c ions +- quantification fdri ### `Fixed` -- empty neoepitope list bugs fixed -- documentation -- scrape version numbers +- empty neoepitope list bugs fixed 
+- documentation +- scrape version numbers ### `Dependencies` @@ -137,9 +137,9 @@ DSL1 to DSL2 conversion ### `Added` -- MHCnugget predictor -- Few fixes -- RT features for percolator +- MHCnugget predictor +- Few fixes +- RT features for percolator ### `Fixed` @@ -153,8 +153,8 @@ DSL1 to DSL2 conversion ### `Fixed` -- linear retention time alignment -- refine_fdr README +- linear retention time alignment +- refine_fdr README ### `Dependencies` @@ -166,8 +166,8 @@ DSL1 to DSL2 conversion ### `Fixed` -- sort channels by basename -- fixed psm-level-fdrs +- sort channels by basename +- fixed psm-level-fdrs ### `Dependencies` @@ -179,7 +179,7 @@ DSL1 to DSL2 conversion ### `Fixed` -- fixed refine_fdr_on_predicted_subset float error +- fixed refine_fdr_on_predicted_subset float error ### `Dependencies` @@ -189,7 +189,7 @@ DSL1 to DSL2 conversion ### `Fixed` -- filter out uncommon aminoacids U,X,B,J,Z +- filter out uncommon aminoacids U,X,B,J,Z ## v1.2.2 nf-core/mhcquant "Golden Eagle" - 2019/01/24 @@ -197,9 +197,9 @@ DSL1 to DSL2 conversion ### `Fixed` -- default params to false -- change on centroidisation parameter -- small changes on docu +- default params to false +- change on centroidisation parameter +- small changes on docu ### `Dependencies` @@ -211,7 +211,7 @@ DSL1 to DSL2 conversion ### `Fixed` -- process identical names bug +- process identical names bug ### `Dependencies` @@ -221,13 +221,13 @@ DSL1 to DSL2 conversion ### `Added` -- Subset FDR refinement option -- Fred2 dependency -- vcf parser and translation to proteins +- Subset FDR refinement option +- Fred2 dependency +- vcf parser and translation to proteins ### `Fixed` -- Documentation +- Documentation ### `Dependencies` @@ -237,13 +237,13 @@ DSL1 to DSL2 conversion ### `Added` -- optional mhcflurry binding predictions -- peak picking as optional preprocessing step +- optional mhcflurry binding predictions +- peak picking as optional preprocessing step ### `Fixed` -- adapted a few parameters such as 
the default fdr threshold -- updated documentation +- adapted a few parameters such as the default fdr threshold +- updated documentation ### `Dependencies` @@ -251,4 +251,4 @@ DSL1 to DSL2 conversion ## v1.0.0 nf-core/mhcquant "Naked Chicken" - 2018/11/27 -- Initial release of nf-core/mhcquant, created with the [nf-core](http://nf-co.re/) template. +- Initial release of nf-core/mhcquant, created with the [nf-core](http://nf-co.re/) template. From 5189cd01bc2aac99e6ecf826271d0e4ef813eb7b Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Thu, 28 Oct 2021 15:37:25 +0200 Subject: [PATCH 011/227] Perform linting --- conf/base.config | 2 +- nextflow_schema.json | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/conf/base.config b/conf/base.config index d3c17b28..821c78cd 100644 --- a/conf/base.config +++ b/conf/base.config @@ -11,7 +11,7 @@ process { - opus = { check_max( 1 * task.attempt, 'cpus' ) } + cpus = { check_max( 1 * task.attempt, 'cpus' ) } memory = { check_max( 6.GB * task.attempt, 'memory' ) } time = { check_max( 4.h * task.attempt, 'time' ) } diff --git a/nextflow_schema.json b/nextflow_schema.json index e32145a3..041a9b61 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -462,6 +462,7 @@ "fa_icon": "fas fa-question-circle", "hidden": true }, + "publish_dir_mode": { "type": "string", "default": "copy", From fafc0be591293a3d42d42fd512fa1c1011cc0369 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Thu, 28 Oct 2021 15:37:34 +0200 Subject: [PATCH 012/227] Perform linting --- .../local/refine_fdr_on_predicted_subset.nf | 66 +++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 subworkflows/local/refine_fdr_on_predicted_subset.nf diff --git a/subworkflows/local/refine_fdr_on_predicted_subset.nf b/subworkflows/local/refine_fdr_on_predicted_subset.nf new file mode 100644 index 00000000..dbdee596 --- /dev/null +++ b/subworkflows/local/refine_fdr_on_predicted_subset.nf @@ -0,0 +1,66 @@ +/* + * Perform an 
additional step where the process are collected + * that are called when the paramater "refine_fdr_on_predicted_subset" is provided + */ + +// VALIDATED (EQUAL TO THE OLD CODE) + +params.percolator_adapter_options = [:] +params.filter_options = [:] +params.whitelist_filter_options = [:] + +def percolator_adapter_options = params.percolator_adapter_options.clone() +percolator_adapter_options.suffix = "perc_subset" + +def filter_psms_options = params.whitelist_filter_options.clone() +def filter_refined_qvalue_options = params.filter_options.clone() + +filter_psms_options.suffix = "pred_filtered" +filter_refined_qvalue_options.suffix = "perc_subset_filtered" + +include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPERC } from '../../modules/local/openms_mztabexporter' addParams( options: [ suffix: "all_ids_merged_psm_perc_filtered" ] ) +include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPSM } from '../../modules/local/openms_mztabexporter' addParams( options: [ suffix: "all_ids_merged" ] ) +include { PREDICT_PSMS } from '../../modules/local/predict_psms' addParams( options: [:] ) +include { OPENMS_PERCOLATORADAPTER } from '../../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_options ) +include { OPENMS_IDFILTER as OPENMS_IDFILTER_PSMS } from '../../modules/local/openms_idfilter' addParams( options: filter_psms_options ) +include { OPENMS_IDFILTER as OPENMS_IDFILTER_REFINED } from '../../modules/local/openms_idfilter' addParams( options: filter_refined_qvalue_options ) + +workflow REFINE_FDR_ON_PREDICTED_SUBSET { + // Define the input parameters + take: + filtered_perc_output + psm_features + classI_alleles + + main: + ch_software_versions = Channel.empty() + // Export filtered percolator results as mztab + OPENMS_MZTABEXPORTERPERC( filtered_perc_output ) + ch_versions = ch_versions.mix(OPENMS_MZTABEXPORTERPERC.out.versions) + // Export psm results as mztab + OPENMS_MZTABEXPORTERPSM( psm_features ) + ch_versions = 
ch_versions.mix(OPENMS_MZTABEXPORTERPSM.out.versions) + // Predict psm results using mhcflurry to shrink search space + PREDICT_PSMS( + OPENMS_MZTABEXPORTERPERC.out.mztab + .join( OPENMS_MZTABEXPORTERPSM.out.mztab, by:[0] ) + .map{ it -> [it[0].sample, it[0], it[1], it[2]] } + .combine( classI_alleles, by:0) + .map(it -> [it[1], it[2], it[3], it[4]]) + ) + ch_versions = ch_versions.mix(PREDICT_PSMS.out.versions) + + // Filter psm results by shrinked search space + OPENMS_IDFILTER_PSMS(psm_features.combine( PREDICT_PSMS.out.idxml, by: [0] )) + ch_versions = ch_versions.mix(OPENMS_IDFILTER_PSMS.out.versions) + // Recompute percolator fdr on shrinked search space + OPENMS_PERCOLATORADAPTER( OPENMS_IDFILTER_PSMS.out.idxml ) + ch_versions = ch_versions.mix(OPENMS_PERCOLATORADAPTER.out.versions) + // Filter results by refined fdr + OPENMS_IDFILTER_REFINED(OPENMS_PERCOLATORADAPTER.out.idxml.flatMap { it -> [tuple(it[0], it[1], null)]}) + ch_versions = ch_versions.mix(OPENMS_IDFILTER_REFINED.out.versions) + emit: + // Define the information that is returned by this workflow + filter_refined_q_value = OPENMS_IDFILTER_REFINED.out.idxml + versions = ch_software_versions +} From 0597a62756c80badd123bb5f270b51e6dd1575e2 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Thu, 28 Oct 2021 15:52:21 +0200 Subject: [PATCH 013/227] Perform linting --- conf/base.config | 10 +++------- conf/test.config | 9 +++------ conf/test_full.config | 13 +++++-------- modules/local/get_software_versions.nf | 1 + modules/local/samplesheet_check.nf | 1 + 5 files changed, 13 insertions(+), 21 deletions(-) diff --git a/conf/base.config b/conf/base.config index 821c78cd..bf02eda6 100644 --- a/conf/base.config +++ b/conf/base.config @@ -11,19 +11,15 @@ process { - cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { check_max( 1 * task.attempt, 'cpus' ) } + memory = { check_max( 4.GB 
* task.attempt, 'memory' ) } + time = { check_max( 2.h * task.attempt, 'time' ) } errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' } maxRetries = 1 maxErrors = '-1' // Process-specific resource requirements - // NOTE - Please try and re-use the labels below as much as possible. - // These labels are used and recognised by default in DSL2 files hosted on nf-core/modules. - // If possible, it would be nice to keep the same label naming convention when - // adding in your local modules too. // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors withLabel:process_low { cpus = { check_max( 2 * task.attempt, 'cpus' ) } diff --git a/conf/test.config b/conf/test.config index d34e6e6a..8a830171 100644 --- a/conf/test.config +++ b/conf/test.config @@ -20,10 +20,7 @@ params { max_time = 6.h // Input data - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv' - - // Input data - fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/test.fasta' - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/sample_sheet.tsv' - allele_sheet = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/allele_sheet.tsv' + fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/test.fasta' + input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/sample_sheet.tsv' + allele_sheet = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/allele_sheet.tsv' } diff --git a/conf/test_full.config b/conf/test_full.config index 223210e8..c12dfe1b 100644 --- a/conf/test_full.config +++ b/conf/test_full.config @@ -14,13 +14,10 @@ params { config_profile_name = 'Full test profile' config_profile_description = 'Full test dataset to check pipeline function' - // Input data for full size test - input = 
'https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_full_illumina_amplicon.csv' + predict_class_1 = true - predict_class_1 = true - - // Input data - fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/test.fasta' - input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/sample_sheet_full.tsv' - allele_sheet = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/allele_sheet_full.tsv' + // Input data + fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/test.fasta' + input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/sample_sheet_full.tsv' + allele_sheet = 'https://raw.githubusercontent.com/nf-core/test-datasets/mhcquant/allele_sheet_full.tsv' } diff --git a/modules/local/get_software_versions.nf b/modules/local/get_software_versions.nf index 01220c97..044bf4bd 100644 --- a/modules/local/get_software_versions.nf +++ b/modules/local/get_software_versions.nf @@ -2,6 +2,7 @@ include { saveFiles } from './functions' params.options = [:] +options = initOptions(params.options) process GET_SOFTWARE_VERSIONS { publishDir "${params.outdir}", diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index b2e6ad62..4f681709 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -2,6 +2,7 @@ include { saveFiles } from './functions' params.options = [:] +options = initOptions(params.options) process SAMPLESHEET_CHECK { tag "$samplesheet" From 1f9010b85b88ab100baf23b980372e31f31f1380 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Thu, 28 Oct 2021 16:03:56 +0200 Subject: [PATCH 014/227] Perform linting --- assets/multiqc_config.yaml | 2 +- nextflow.config | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/assets/multiqc_config.yaml b/assets/multiqc_config.yaml index a0e5fa3c..76398921 100644 --- a/assets/multiqc_config.yaml +++ b/assets/multiqc_config.yaml 
@@ -1,7 +1,7 @@ report_comment: > This report has been generated by the nf-core/mhcquant analysis pipeline. For information about how to interpret these results, please see the - documentation. + documentation. report_section_order: software_versions: order: -1000 diff --git a/nextflow.config b/nextflow.config index 02e4cfed..95c6c17b 100644 --- a/nextflow.config +++ b/nextflow.config @@ -81,6 +81,7 @@ params { // Boilerplate options outdir = './results' tracedir = "${params.outdir}/pipeline_info" + cpus = process.cpus publish_dir_mode = 'copy' email = null email_on_fail = null From 8c0bbb86bc536c555688dfcda7226f0039db7a1a Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Thu, 28 Oct 2021 17:17:48 +0200 Subject: [PATCH 015/227] Include the multiQC file again --- CHANGELOG.md | 1 + conf/modules.config | 6 ++++++ lib/WorkflowMhcquant.groovy | 27 ++++++++++++++++++++++++ modules/local/samplesheet_check.nf | 12 ++++++++--- nextflow.config | 9 ++++---- nextflow_schema.json | 2 +- workflows/mhcquant.nf | 33 +++++++++++++++++++++--------- 7 files changed, 71 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e494c5f..921dd4dd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### `Added` - Inclusion of assets/schema_input.json +- Added the multiQC again to report the versions ### `Fixed` diff --git a/conf/modules.config b/conf/modules.config index 43d40c6f..0c837d89 100644 --- a/conf/modules.config +++ b/conf/modules.config @@ -22,6 +22,12 @@ params { modules { + + 'multiqc' { + args = '' + publish_dir = "multiqc/" + } + 'openms_map_aligner_identification' { args = "-model:type linear -algorithm:max_rt_shift ${params.max_rt_alignment_shift} " } diff --git a/lib/WorkflowMhcquant.groovy b/lib/WorkflowMhcquant.groovy index f3b4e82f..b9450083 100644 --- a/lib/WorkflowMhcquant.groovy +++ b/lib/WorkflowMhcquant.groovy @@ -76,4 +76,31 @@ class WorkflowMhcquant { 
"===================================================================================" } + // + // Get workflow summary for MultiQC + // + public static String paramsSummaryMultiqc(workflow, summary) { + String summary_section = '' + for (group in summary.keySet()) { + def group_params = summary.get(group) // This gets the parameters of that particular group + if (group_params) { + summary_section += "

$group

\n" + summary_section += "
\n" + for (param in group_params.keySet()) { + summary_section += "
$param
${group_params.get(param) ?: 'N/A'}
\n" + } + summary_section += "
\n" + } + } + + String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" + yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n" + yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n" + yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n" + yaml_file_text += "plot_type: 'html'\n" + yaml_file_text += "data: |\n" + yaml_file_text += "${summary_section}" + return yaml_file_text + } + } diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index 4f681709..47c3cf0e 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { saveFiles } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -18,15 +18,21 @@ process SAMPLESHEET_CHECK { } input: - path samplesheet + path samplesheet output: - path '*.csv' + path '*.csv', emit: csv + path "versions.yml", emit: versions script: // This script is bundled with the pipeline, in nf-core/mhcquant/bin/ """ check_samplesheet.py \\ $samplesheet \\ samplesheet.valid.csv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + python: \$(python --version | sed 's/Python //g') + END_VERSIONS """ } diff --git a/nextflow.config b/nextflow.config index 95c6c17b..2a624f12 100644 --- a/nextflow.config +++ b/nextflow.config @@ -17,9 +17,9 @@ params { publish_dir_mode = 'copy' // References - //genome = null - //igenomes_base = 's3://ngi-igenomes/igenomes' - //igenomes_ignore = true + genomes = null + igenomes_base = 's3://ngi-igenomes/igenomes' + igenomes_ignore = true // Workflow options allele_sheet = false @@ -27,7 +27,7 @@ params { predict_class_1 = false predict_class_2 = false refine_fdr_on_predicted_subset = false - schema_ignore_params = 
'genomes,input_paths' + schema_ignore_params = 'genome,input_paths' skip_decoy_generation = false subset_affinity_threshold = 500 variant_annotation_style = "SNPEFF" @@ -81,7 +81,6 @@ params { // Boilerplate options outdir = './results' tracedir = "${params.outdir}/pipeline_info" - cpus = process.cpus publish_dir_mode = 'copy' email = null email_on_fail = null diff --git a/nextflow_schema.json b/nextflow_schema.json index 041a9b61..0fc13666 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -20,7 +20,7 @@ "help_text": "Use this to specify a sample sheet table including your input raw or mzml files as well as their meta information such as SampleID and Condition. For example:\n\n| ID | Sample | Condition | ReplicateFileName |\n| -----|:------------:| ----------:|------------------------------------------:|\n| 1 | MM15_Melanom | A | data/MM15_Melanom_W_1_A_standard.raw |\n| 2 | MM15_Melanom | B | data/MM15_Melanom_W_1_B_standard.raw |\n| 3 | MM17_Melanom | B | data/MM17_Melanom_W_1_B_standard.raw |\n\n```bash\n--input 'path/samples.tsv'\n```", "format": "file-path", "mimetype": "text/csv", - "pattern": "^\\S+\\.csv$", + "pattern": "^\\S+\\.tsv$", "schema": "assets/schema_input.json", "fa_icon": "fas fa-file-csv" }, diff --git a/workflows/mhcquant.nf b/workflows/mhcquant.nf index 04e3a9b2..bf78f45a 100644 --- a/workflows/mhcquant.nf +++ b/workflows/mhcquant.nf @@ -62,6 +62,16 @@ rm_precursor = params.remove_precursor_peak ? '-remove_precursor_peak true' : '' fdr_level = (params.fdr_level == 'psm-level-fdrs') ? '' : '-'+params.fdr_level fdr_adj_threshold = (params.fdr_threshold == '0.01') ? 
'0.05' : params.fdr_threshold +/* +======================================================================================== + CONFIG FILES +======================================================================================== +*/ + +ch_multiqc_config = file("$projectDir/assets/multiqc_config.yaml", checkIfExists: true) +ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multiqc_config) : Channel.empty() + + /* ======================================================================================== IMPORT LOCAL MODULES/SUBWORKFLOWS @@ -70,6 +80,9 @@ fdr_adj_threshold = (params.fdr_threshold == '0.01') ? '0.05' : params.fdr_thres // Don't overwrite global params.modules, create a copy instead and use that within the main script. def modules = params.modules.clone() +def multiqc_options = modules['multiqc'] +multiqc_options.args += params.multiqc_title ? Utils.joinModuleArgs(["--title \"$params.multiqc_title\""]) : '' + def openms_map_aligner_identification_options = modules['openms_map_aligner_identification'] def openms_comet_adapter_options = modules['openms_comet_adapter'] def generate_proteins_from_vcf_options = modules['generate_proteins_from_vcf'] @@ -93,7 +106,7 @@ id_filter_qvalue_options.suffix = "filtered" include { hasExtension } from '../modules/local/functions' -include { INPUT_CHECK } from '../subworkflow/local/input_check' addParams( options: [:] ) +include { INPUT_CHECK } from '../subworkflows/local/input_check' addParams( options: [:] ) include { GENERATE_PROTEINS_FROM_VCF } from '../modules/local/generate_proteins_from_vcf' addParams( options: generate_proteins_from_vcf_options ) include { OPENMS_DECOYDATABASE } from '../modules/local/openms_decoydatabase' addParams( options: [:] ) include { OPENMS_THERMORAWFILEPARSER } from '../modules/local/openms_thermorawfileparser' addParams( options: [:] ) @@ -114,7 +127,7 @@ include { OPENMS_PSMFEATUREEXTRACTOR } from '../modules/loc include { OPENMS_PERCOLATORADAPTER } from 
'../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_options ) include { OPENMS_PERCOLATORADAPTER as OPENMS_PERCOLATORADAPTER_KLAMMER } from '../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_klammer_options ) -include { REFINE_FDR_ON_PREDICTED_SUBSET } from '../subworkflow/local/refine_fdr_on_predicted_subset' addParams( run_percolator_options : percolator_adapter_options, filter_options: id_filter_options, whitelist_filter_options: id_filter_whitelist_options) +include { REFINE_FDR_ON_PREDICTED_SUBSET } from '../subworkflows/local/refine_fdr_on_predicted_subset' addParams( run_percolator_options : percolator_adapter_options, filter_options: id_filter_options, whitelist_filter_options: id_filter_whitelist_options) include { OPENMS_FEATUREFINDERIDENTIFICATION } from '../modules/local/openms_featurefinderidentification' addParams( options: [:] ) include { OPENMS_FEATURELINKERUNLABELEDKD } from '../modules/local/openms_featurelinkerunlabeledkd' addParams( options: [:] ) @@ -140,6 +153,7 @@ include { OPENMS_RTPREDICT as OPENMS_RTPREDICT_FOUND_PEPTIDES} from '../mod include { OPENMS_RTPREDICT as OPENMS_RTPREDICT_NEOEPITOPES} from '../modules/local/openms_rtpredict' addParams( options: [suffix:"_txt_file_for_rt_prediction_RTpredicted"] ) include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/modules/custom/dumpsoftwareversions/main' addParams( options: [publish_files : ['_versions.yml':'']] ) +include { MULTIQC } from '../modules/nf-core/modules/multiqc/main' addParams( options: multiqc_options ) //////////////////////////////////////////////////// /* -- CREATE CHANNELS -- */ @@ -453,18 +467,17 @@ workflow MHCQUANT { // // MODULE: Pipeline reporting // - ch_software_versions + ch_versions .map { it -> if (it) [ it.baseName, it ] } .groupTuple() .map { it[1][0] } .flatten() .collect() - .set { ch_software_versions } + .set { ch_versions } - GET_SOFTWARE_VERSIONS ( - ch_software_versions.map { it 
}.collect() + CUSTOM_DUMPSOFTWAREVERSIONS ( + ch_versions.unique().collectFile() ) - // // MODULE: MultiQC // @@ -475,14 +488,14 @@ workflow MHCQUANT { ch_multiqc_files = ch_multiqc_files.mix(Channel.from(ch_multiqc_config)) ch_multiqc_files = ch_multiqc_files.mix(ch_multiqc_custom_config.collect().ifEmpty([])) ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) - ch_multiqc_files = ch_multiqc_files.mix(GET_SOFTWARE_VERSIONS.out.yaml.collect()) - ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}.ifEmpty([])) + ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) + // ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}.ifEmpty([])) MULTIQC ( ch_multiqc_files.collect() ) multiqc_report = MULTIQC.out.report.toList() - ch_software_versions = ch_software_versions.mix(MULTIQC.out.version.ifEmpty(null)) + ch_versions = ch_versions.mix(MULTIQC.out.version.ifEmpty(null)) } /* From 2e6a73bed37778031ede74078aad5fd0da0359dc Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Thu, 28 Oct 2021 17:18:22 +0200 Subject: [PATCH 016/227] Update the changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 921dd4dd..a2d09691 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,7 @@ ### `Added` - Inclusion of assets/schema_input.json -- Added the multiQC again to report the versions +- Added the multiQC again to report the versions45 ### `Fixed` From 82f22374403f91786929fd261efd966fef940300 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Thu, 28 Oct 2021 17:21:25 +0200 Subject: [PATCH 017/227] Fix template file of multiqc_config.yml --- assets/multiqc_config.yaml | 2 +- nextflow.config | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/assets/multiqc_config.yaml b/assets/multiqc_config.yaml index 76398921..a0e5fa3c 100644 --- a/assets/multiqc_config.yaml +++ 
b/assets/multiqc_config.yaml @@ -1,7 +1,7 @@ report_comment: > This report has been generated by the nf-core/mhcquant analysis pipeline. For information about how to interpret these results, please see the - documentation. + documentation. report_section_order: software_versions: order: -1000 diff --git a/nextflow.config b/nextflow.config index 2a624f12..326fc666 100644 --- a/nextflow.config +++ b/nextflow.config @@ -18,8 +18,8 @@ params { // References genomes = null - igenomes_base = 's3://ngi-igenomes/igenomes' - igenomes_ignore = true + //igenomes_base = 's3://ngi-igenomes/igenomes' + //igenomes_ignore = true // Workflow options allele_sheet = false From 49bd14788ec53591e74d698a82b4acb3c9faedf0 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Fri, 5 Nov 2021 14:42:36 +0100 Subject: [PATCH 018/227] Rename modules, include the version check and multiQC reporting --- CHANGELOG.md | 7 +- ... => mhcflurry_predictneoepitopesclass1.nf} | 8 +- ....nf => mhcflurry_predictpeptidesclass1.nf} | 10 +- ...edict_psms.nf => mhcflurry_predictpsms.nf} | 8 +- ...nf => mhcnuggets_neoepitopesclass2post.nf} | 6 +- ....nf => mhcnuggets_neoepitopesclass2pre.nf} | 4 +- ..._2.nf => mhcnuggets_peptidesclass2post.nf} | 6 +- ...s_2.nf => mhcnuggets_peptidesclass2pre.nf} | 4 +- ...=> mhcnuggets_predictneoepitopesclass2.nf} | 6 +- ...nf => mhcnuggets_predictpeptidesclass2.nf} | 10 +- .../predict_possible_class_2_neoepitopes.nf | 8 +- modules/local/predict_possible_neoepitopes.nf | 4 +- .../resolve_found_class_2_neoepitopes.nf | 4 +- modules/local/resolve_found_neoepitopes.nf | 4 +- modules/local/samplesheet_check.nf | 6 +- nextflow.config | 1 + nextflow_schema.json | 2 +- .../local/refine_fdr_on_predicted_subset.nf | 2 +- workflows/mhcquant.nf | 156 ++++++++---------- 19 files changed, 124 insertions(+), 132 deletions(-) rename modules/local/{predict_neoepitopes_mhcflurry_class_1.nf => mhcflurry_predictneoepitopesclass1.nf} (86%) rename 
modules/local/{predict_peptides_mhcflurry_class_1.nf => mhcflurry_predictpeptidesclass1.nf} (78%) rename modules/local/{predict_psms.nf => mhcflurry_predictpsms.nf} (87%) rename modules/local/{postprocess_neoepitopes_mhcnuggets_class_2.nf => mhcnuggets_neoepitopesclass2post.nf} (82%) rename modules/local/{preprocess_neoepitopes_mhcnuggets_class_2.nf => mhcnuggets_neoepitopesclass2pre.nf} (83%) rename modules/local/{postprocess_peptides_mhcnuggets_class_2.nf => mhcnuggets_peptidesclass2post.nf} (84%) rename modules/local/{preprocess_peptides_mhcnuggets_class_2.nf => mhcnuggets_peptidesclass2pre.nf} (84%) rename modules/local/{predict_neoepitopes_mhcnuggets_class_2.nf => mhcnuggets_predictneoepitopesclass2.nf} (81%) rename modules/local/{predict_peptides_mhcnuggets_class_2.nf => mhcnuggets_predictpeptidesclass2.nf} (74%) diff --git a/CHANGELOG.md b/CHANGELOG.md index a2d09691..af0c27f6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,11 +1,12 @@ # nf-core/mhcquant: Changelog -## v2.1.0 nf-core/mhcquant "Olive Tin Hamster" - 2021/10/25 +## v2.1.0 nf-core/mhcquant "Olive Tin Hamster" - 2021/MM/DD ### `Added` - Inclusion of assets/schema_input.json -- Added the multiQC again to report the versions45 +- Added the multiQC again to report the versions +- MHCquant parameters are now directly assigned to the argument of the ### `Fixed` @@ -22,7 +23,7 @@ Note, since the pipeline is now using Nextflow DSL2, each process will be run wi | `openms` | 2.5.0 | 2.6.0 | | `openms-thirdparty` | 2.5.0 | 2.6.0 | | `thermorawfileparser` | 1.2.3 | 1.3.4 | -| `mhcflurry` | 1.4.3 | 2.0.1 | + > **NB:** Dependency has been **updated** if both old and new version information is present. > **NB:** Dependency has been **added** if just the new version information is present. 
diff --git a/modules/local/predict_neoepitopes_mhcflurry_class_1.nf b/modules/local/mhcflurry_predictneoepitopesclass1.nf similarity index 86% rename from modules/local/predict_neoepitopes_mhcflurry_class_1.nf rename to modules/local/mhcflurry_predictneoepitopesclass1.nf index 071b7008..9f76d4cd 100644 --- a/modules/local/predict_neoepitopes_mhcflurry_class_1.nf +++ b/modules/local/mhcflurry_predictneoepitopesclass1.nf @@ -4,7 +4,7 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -process PREDICT_NEOEPITOPES_MHCFLURRY_CLASS_1 { +process MHCFLURRY_PREDICTNEOEPITOPESCLASS1 { tag "$meta" label 'process_low' @@ -12,11 +12,11 @@ process PREDICT_NEOEPITOPES_MHCFLURRY_CLASS_1 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_1_bindings', publish_id:'class_1_bindings') } - conda (params.enable_conda ? "bioconda::mhcflurry=2.0.1" : null) + conda (params.enable_conda ? 
"bioconda::mhcflurry=1.4.3" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mhcflurry:2.0.1--pyh864c0ab_0" + container "https://depot.galaxyproject.org/singularity/mhcflurry:1.4.3--py_0" } else { - container "quay.io/biocontainers/mhcflurry:2.0.1--pyh864c0ab_0" + container "quay.io/biocontainers/mhcflurry:1.4.3--py_0" } diff --git a/modules/local/predict_peptides_mhcflurry_class_1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf similarity index 78% rename from modules/local/predict_peptides_mhcflurry_class_1.nf rename to modules/local/mhcflurry_predictpeptidesclass1.nf index deaed97a..1c122662 100644 --- a/modules/local/predict_peptides_mhcflurry_class_1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -4,10 +4,10 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSIONFRED2 = '2.0.6' -def VERSIONMHCNUGGETS = '2.3.2' +// def VERSIONFRED2 = '2.0.6' +// def VERSIONMHCNUGGETS = '2.3.2' -process PREDICT_PEPTIDES_MHCFLURRY_CLASS_1 { +process MHCFLURRY_PREDICTPEPTIDESCLASS1 { tag "$meta" label 'process_low' @@ -39,8 +39,8 @@ process PREDICT_PEPTIDES_MHCFLURRY_CLASS_1 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) - mhcnuggets: \$(echo $VERSIONMHCNUGGETS) - FRED2: \$(echo $VERSIONFRED2) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + FRED2: \$(echo \$(python -c "import pkg_resources; print 'fred2 ' + pkg_resources.get_distribution('Fred2').version" | sed 's/^fred2 //; s/ .*\$//')) END_VERSIONS """ } diff --git a/modules/local/predict_psms.nf b/modules/local/mhcflurry_predictpsms.nf similarity index 87% rename from 
modules/local/predict_psms.nf rename to modules/local/mhcflurry_predictpsms.nf index bfa144c7..bda370fe 100644 --- a/modules/local/predict_psms.nf +++ b/modules/local/mhcflurry_predictpsms.nf @@ -4,7 +4,7 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -process PREDICT_PSMS { +process MHCFLURRY_PREDICTPSMS { tag "$meta" label 'process_medium' @@ -12,11 +12,11 @@ process PREDICT_PSMS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'Intermediate_Results', publish_id:'Intermediate_Results') } - conda (params.enable_conda ? "bioconda::mhcflurry=2.0.1" : null) + conda (params.enable_conda ? "bioconda::mhcflurry=1.4.3" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mhcflurry:2.0.1--pyh864c0ab_0" + container "https://depot.galaxyproject.org/singularity/mhcflurry:1.4.3--py_0" } else { - container "quay.io/biocontainers/mhcflurry:2.0.1--pyh864c0ab_0" + container "quay.io/biocontainers/mhcflurry:1.4.3--py_0" } input: diff --git a/modules/local/postprocess_neoepitopes_mhcnuggets_class_2.nf b/modules/local/mhcnuggets_neoepitopesclass2post.nf similarity index 82% rename from modules/local/postprocess_neoepitopes_mhcnuggets_class_2.nf rename to modules/local/mhcnuggets_neoepitopesclass2post.nf index 22442cf8..09f35777 100644 --- a/modules/local/postprocess_neoepitopes_mhcnuggets_class_2.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2post.nf @@ -4,9 +4,9 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSION = '2.3.2' +// def VERSION = '2.3.2' -process POSTPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2 { +process MHCNUGGETS_NEOEPITOPESCLASS2POST { tag "$meta" label 'process_low' @@ -36,7 +36,7 @@ process 
POSTPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcnuggets: \$(echo $VERSION) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) END_VERSIONS """ } diff --git a/modules/local/preprocess_neoepitopes_mhcnuggets_class_2.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf similarity index 83% rename from modules/local/preprocess_neoepitopes_mhcnuggets_class_2.nf rename to modules/local/mhcnuggets_neoepitopesclass2pre.nf index 96aa2125..c6057248 100644 --- a/modules/local/preprocess_neoepitopes_mhcnuggets_class_2.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -6,7 +6,7 @@ options = initOptions(params.options) def VERSION = '2.3.2' -process PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2 { +process MHCNUGGETS_NEOEPITOPESCLASS2RE { tag "$meta" label 'process_low' @@ -32,7 +32,7 @@ process PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcnuggets: \$(echo $VERSION) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) END_VERSIONS """ } diff --git a/modules/local/postprocess_peptides_mhcnuggets_class_2.nf b/modules/local/mhcnuggets_peptidesclass2post.nf similarity index 84% rename from modules/local/postprocess_peptides_mhcnuggets_class_2.nf rename to modules/local/mhcnuggets_peptidesclass2post.nf index 4fb76980..d598ed36 100644 --- a/modules/local/postprocess_peptides_mhcnuggets_class_2.nf +++ b/modules/local/mhcnuggets_peptidesclass2post.nf @@ -4,9 +4,9 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSION = '2.3.2' +// def VERSION = '2.3.2' -process POSTPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2 { 
+process MHCNUGGETS_PEPTIDESCLASS2POST { tag "$meta" label 'process_low' @@ -36,7 +36,7 @@ process POSTPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcnuggets: \$(echo $VERSION) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) END_VERSIONS """ } diff --git a/modules/local/preprocess_peptides_mhcnuggets_class_2.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf similarity index 84% rename from modules/local/preprocess_peptides_mhcnuggets_class_2.nf rename to modules/local/mhcnuggets_peptidesclass2pre.nf index 1bbc52df..d3a18d01 100644 --- a/modules/local/preprocess_peptides_mhcnuggets_class_2.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -6,7 +6,7 @@ options = initOptions(params.options) def VERSION = '2.3.2' -process PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2 { +process MHCNUGGETS_PEPTIDESCLASS2PRE { tag "$meta" label 'process_low' @@ -33,7 +33,7 @@ process PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcnuggets: \$(echo $VERSION) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) END_VERSIONS """ } diff --git a/modules/local/predict_neoepitopes_mhcnuggets_class_2.nf b/modules/local/mhcnuggets_predictneoepitopesclass2.nf similarity index 81% rename from modules/local/predict_neoepitopes_mhcnuggets_class_2.nf rename to modules/local/mhcnuggets_predictneoepitopesclass2.nf index 33baad40..be6d9180 100644 --- a/modules/local/predict_neoepitopes_mhcnuggets_class_2.nf +++ b/modules/local/mhcnuggets_predictneoepitopesclass2.nf @@ -4,9 +4,9 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSION = 
'2.3.2' +// def VERSION = '2.3.2' -process PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2 { +process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { tag "$meta" label 'process_low' @@ -32,7 +32,7 @@ process PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcnuggets: \$(echo $VERSION) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) END_VERSIONS """ } diff --git a/modules/local/predict_peptides_mhcnuggets_class_2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf similarity index 74% rename from modules/local/predict_peptides_mhcnuggets_class_2.nf rename to modules/local/mhcnuggets_predictpeptidesclass2.nf index f33931f3..a6bae12f 100644 --- a/modules/local/predict_peptides_mhcnuggets_class_2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -4,10 +4,10 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSIONFRED2 = '2.0.6' -def VERSIONMHCNUGGETS = '2.3.2' +// def VERSIONFRED2 = '2.0.6' +// def VERSIONMHCNUGGETS = '2.3.2' -process PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2 { +process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { tag "$meta" label 'process_low' @@ -33,8 +33,8 @@ process PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcnuggets: \$(echo $VERSIONMHCNUGGETS) - FRED2: \$(echo $VERSIONFRED2) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + FRED2: \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/local/predict_possible_class_2_neoepitopes.nf 
b/modules/local/predict_possible_class_2_neoepitopes.nf index 05fe85e6..97426d67 100644 --- a/modules/local/predict_possible_class_2_neoepitopes.nf +++ b/modules/local/predict_possible_class_2_neoepitopes.nf @@ -4,8 +4,8 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSIONFRED2 = '2.0.6' -def VERSIONMHCNUGGETS = '2.3.2' +// def VERSIONFRED2 = '2.0.6' +// def VERSIONMHCNUGGETS = '2.3.2' process PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES { tag "$meta" @@ -39,8 +39,8 @@ process PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) - mhcnuggets: \$(echo $VERSIONMHCNUGGETS) - FRED2: \$(echo $VERSIONFRED2) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + FRED2: \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/local/predict_possible_neoepitopes.nf b/modules/local/predict_possible_neoepitopes.nf index 5c3ac414..b2c85d2c 100644 --- a/modules/local/predict_possible_neoepitopes.nf +++ b/modules/local/predict_possible_neoepitopes.nf @@ -39,8 +39,8 @@ process PREDICT_POSSIBLE_NEOEPITOPES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) - mhcnuggets: \$(echo $VERSIONMHCNUGGETS) - FRED2: \$(echo $VERSIONFRED2) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + FRED2: \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ 
.*\$//') END_VERSIONS """ } diff --git a/modules/local/resolve_found_class_2_neoepitopes.nf b/modules/local/resolve_found_class_2_neoepitopes.nf index 0aec6214..85a0dbcd 100644 --- a/modules/local/resolve_found_class_2_neoepitopes.nf +++ b/modules/local/resolve_found_class_2_neoepitopes.nf @@ -40,8 +40,8 @@ process RESOLVE_FOUND_CLASS_2_NEOEPITOPES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) - mhcnuggets: \$(echo $VERSIONMHCNUGGETS) - FRED2: \$(echo $VERSIONFRED2) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + FRED2: \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/local/resolve_found_neoepitopes.nf b/modules/local/resolve_found_neoepitopes.nf index 2ee16c1b..a13ed77a 100644 --- a/modules/local/resolve_found_neoepitopes.nf +++ b/modules/local/resolve_found_neoepitopes.nf @@ -38,8 +38,8 @@ process RESOLVE_FOUND_NEOEPITOPES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) - mhcnuggets: \$(echo $VERSIONMHCNUGGETS) - FRED2: \$(echo $VERSIONFRED2) + mhcnuggets: \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//') + FRED2: \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index 47c3cf0e..5c846333 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -31,8 +31,8 @@ process SAMPLESHEET_CHECK { 
samplesheet.valid.csv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - python: \$(python --version | sed 's/Python //g') - END_VERSIONS + ${getProcessName(task.process)}: + python: \$(echo \$(python --version | sed 's/Python //g')) + END_VERSIONS """ } diff --git a/nextflow.config b/nextflow.config index 326fc666..138c3a52 100644 --- a/nextflow.config +++ b/nextflow.config @@ -37,6 +37,7 @@ params { variant_snp_filter = false // MultiQC options + skip_multiqc = false multiqc_config = null multiqc_title = null max_multiqc_email_size = '25.MB' diff --git a/nextflow_schema.json b/nextflow_schema.json index 0fc13666..31e20e0c 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -372,7 +372,7 @@ "description": "Skip MultiQC.", "fa_icon": "fas fa-fast-forward", "hidden": true, - "default": true + "default": false }, "custom_config_base": { "type": "string", diff --git a/subworkflows/local/refine_fdr_on_predicted_subset.nf b/subworkflows/local/refine_fdr_on_predicted_subset.nf index dbdee596..d39e1f4f 100644 --- a/subworkflows/local/refine_fdr_on_predicted_subset.nf +++ b/subworkflows/local/refine_fdr_on_predicted_subset.nf @@ -20,7 +20,7 @@ filter_refined_qvalue_options.suffix = "perc_subset_filtered" include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPERC } from '../../modules/local/openms_mztabexporter' addParams( options: [ suffix: "all_ids_merged_psm_perc_filtered" ] ) include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPSM } from '../../modules/local/openms_mztabexporter' addParams( options: [ suffix: "all_ids_merged" ] ) -include { PREDICT_PSMS } from '../../modules/local/predict_psms' addParams( options: [:] ) +include { MHCFLURRY_PREDICTPSMS } from '../../modules/local/mhcflurry_predictpsms' addParams( options: [:] ) include { OPENMS_PERCOLATORADAPTER } from '../../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_options ) include { OPENMS_IDFILTER as OPENMS_IDFILTER_PSMS } from 
'../../modules/local/openms_idfilter' addParams( options: filter_psms_options ) include { OPENMS_IDFILTER as OPENMS_IDFILTER_REFINED } from '../../modules/local/openms_idfilter' addParams( options: filter_refined_qvalue_options ) diff --git a/workflows/mhcquant.nf b/workflows/mhcquant.nf index bf78f45a..de46bdd0 100644 --- a/workflows/mhcquant.nf +++ b/workflows/mhcquant.nf @@ -48,20 +48,6 @@ if (params.include_proteins_from_vcf) { .set { ch_vcf_from_sheet } } -if (params.variant_indel_filter) { variant_indel_filter="-fINDEL" } else { variant_indel_filter="" } -if (params.variant_frameshift_filter) { variant_frameshift_filter="-fFS" } else { variant_frameshift_filter="" } -if (params.variant_snp_filter) { variant_snp_filter="-fSNP" } else { variant_snp_filter="" } - -// Mass Spectronomy data processing options -x_ions = params.use_x_ions ? '-use_X_ions true' : '' -z_ions = params.use_z_ions ? '-use_Z_ions true' : '' -a_ions = params.use_a_ions ? '-use_A_ions true' : '' -c_ions = params.use_c_ions ? '-use_C_ions true' : '' -NL_ions = params.use_NL_ions ? '-use_NL_ions true' : '' -rm_precursor = params.remove_precursor_peak ? '-remove_precursor_peak true' : '' -fdr_level = (params.fdr_level == 'psm-level-fdrs') ? '' : '-'+params.fdr_level -fdr_adj_threshold = (params.fdr_threshold == '0.01') ? 
'0.05' : params.fdr_threshold - /* ======================================================================================== CONFIG FILES @@ -92,10 +78,19 @@ def id_filter_for_alignment_options = id_filter_options.clone() def id_filter_whitelist_options = modules['id_filter_whitelist'] id_filter_options.args += " -score:pep " + params.fdr_threshold -id_filter_for_alignment_options.args += " -score:pep " + fdr_adj_threshold -openms_comet_adapter_options.args += x_ions + z_ions + c_ions + a_ions + NL_ions + rm_precursor -generate_proteins_from_vcf_options.args += variant_indel_filter + variant_snp_filter + variant_frameshift_filter -percolator_adapter_options.args += fdr_level +id_filter_for_alignment_options.args += " -score:pep " + (params.fdr_threshold == '0.01') ? Utils.joinModuleArgs(['-score:pep 0.05']) : Utils.joinModuleArgs(['-score:pep ' + params.fdr_threshold]) +openms_comet_adapter_options.args += params.use_x_ions ? Utils.joinModuleArgs(['-use_X_ions true']) : '' +openms_comet_adapter_options.args += params.use_z_ions ? Utils.joinModuleArgs(['-use_Z_ions true']) : '' +openms_comet_adapter_options.args += params.use_a_ions ? Utils.joinModuleArgs(['-use_A_ions true']) : '' +openms_comet_adapter_options.args += params.use_c_ions ? Utils.joinModuleArgs(['-use_C_ions true']) : '' +openms_comet_adapter_options.args += params.use_NL_ions ? Utils.joinModuleArgs(['-use_NL_ions true']) : '' +openms_comet_adapter_options.args += params.remove_precursor_peak ? Utils.joinModuleArgs(['-remove_precursor_peak yes']) : '' + +generate_proteins_from_vcf_options.args += params.variant_indel_filter ? Utils.joinModuleArgs(['-fINDEL']) : '' +generate_proteins_from_vcf_options.args += params.variant_frameshift_filter ? Utils.joinModuleArgs(['-fFS']) : '' +generate_proteins_from_vcf_options.args += params.variant_snp_filter ? Utils.joinModuleArgs(['-fSNP']) : '' +percolator_adapter_options.args += (params.fdr_level != 'psm-level-fdrs') ? 
Utils.joinModuleArgs(['-'+params.fdr_level]) : '' + percolator_adapter_options.suffix = "all_ids_merged_psm_perc" def percolator_adapter_klammer_options = percolator_adapter_options.clone() @@ -104,9 +99,12 @@ percolator_adapter_klammer_options.args += " -klammer" def id_filter_qvalue_options = id_filter_options.clone() id_filter_qvalue_options.suffix = "filtered" +//////////////////////////////////////////////////// +/* -- CREATE CHANNELS -- */ +//////////////////////////////////////////////////// include { hasExtension } from '../modules/local/functions' -include { INPUT_CHECK } from '../subworkflows/local/input_check' addParams( options: [:] ) +include { INPUT_CHECK } from '../subworkflows/local/input_check' addParams( options: [:] ) include { GENERATE_PROTEINS_FROM_VCF } from '../modules/local/generate_proteins_from_vcf' addParams( options: generate_proteins_from_vcf_options ) include { OPENMS_DECOYDATABASE } from '../modules/local/openms_decoydatabase' addParams( options: [:] ) include { OPENMS_THERMORAWFILEPARSER } from '../modules/local/openms_thermorawfileparser' addParams( options: [:] ) @@ -127,7 +125,7 @@ include { OPENMS_PSMFEATUREEXTRACTOR } from '../modules/loc include { OPENMS_PERCOLATORADAPTER } from '../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_options ) include { OPENMS_PERCOLATORADAPTER as OPENMS_PERCOLATORADAPTER_KLAMMER } from '../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_klammer_options ) -include { REFINE_FDR_ON_PREDICTED_SUBSET } from '../subworkflows/local/refine_fdr_on_predicted_subset' addParams( run_percolator_options : percolator_adapter_options, filter_options: id_filter_options, whitelist_filter_options: id_filter_whitelist_options) +include { REFINE_FDR_ON_PREDICTED_SUBSET } from '../subworkflows/local/refine_fdr_on_predicted_subset' addParams( run_percolator_options : percolator_adapter_options, filter_options: id_filter_options, whitelist_filter_options: 
id_filter_whitelist_options) include { OPENMS_FEATUREFINDERIDENTIFICATION } from '../modules/local/openms_featurefinderidentification' addParams( options: [:] ) include { OPENMS_FEATURELINKERUNLABELEDKD } from '../modules/local/openms_featurelinkerunlabeledkd' addParams( options: [:] ) @@ -135,31 +133,26 @@ include { OPENMS_IDCONFLICTRESOLVER } from '../modules/loc include { OPENMS_TEXTEXPORTER } from '../modules/local/openms_textexporter' addParams( options: [:] ) include { OPENMS_MZTABEXPORTER } from '../modules/local/openms_mztabexporter' addParams( options: [:] ) -include { PREDICT_PEPTIDES_MHCFLURRY_CLASS_1 } from '../modules/local/predict_peptides_mhcflurry_class_1' addParams( options: [:] ) -include { PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2 } from '../modules/local/preprocess_peptides_mhcnuggets_class_2' addParams( options: [:] ) -include { PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2 } from '../modules/local/predict_peptides_mhcnuggets_class_2' addParams( options: [:] ) -include { POSTPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2 } from '../modules/local/postprocess_peptides_mhcnuggets_class_2' addParams( options: [:] ) +include { MHCFLURRY_PREDICTPEPTIDESCLASS1 } from '../modules/local/mhcflurry_predictpeptidesclass1' addParams( options: [:] ) +include { MHCNUGGETS_PEPTIDESCLASS2PRE } from '../modules/local/mhcnuggets_peptidesclass2pre' addParams( options: [:] ) +include { MHCNUGGETS_PREDICTPEPTIDESCLASS2 } from '../modules/local/mhcnuggets_predictpeptidesclass2' addParams( options: [:] ) +include { MHCNUGGETS_PEPTIDESCLASS2POST } from '../modules/local/mhcnuggets_peptidesclass2post' addParams( options: [:] ) include { PREDICT_POSSIBLE_NEOEPITOPES } from '../modules/local/predict_possible_neoepitopes' addParams( options: [:] ) include { PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES } from '../modules/local/predict_possible_class_2_neoepitopes' addParams( options: [:] ) include { RESOLVE_FOUND_NEOEPITOPES } from '../modules/local/resolve_found_neoepitopes' addParams( options: [:] ) 
include { RESOLVE_FOUND_CLASS_2_NEOEPITOPES } from '../modules/local/resolve_found_class_2_neoepitopes' addParams( options: [:] ) -include { PREDICT_NEOEPITOPES_MHCFLURRY_CLASS_1 } from '../modules/local/predict_neoepitopes_mhcflurry_class_1' addParams( options: [:] ) -include { PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2 } from '../modules/local/preprocess_neoepitopes_mhcnuggets_class_2' addParams( options: [:] ) -include { PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2 } from '../modules/local/predict_neoepitopes_mhcnuggets_class_2' addParams( options: [:] ) -include { POSTPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2 } from '../modules/local/postprocess_neoepitopes_mhcnuggets_class_2' addParams( options: [:] ) +include { MHCFLURRY_PREDICTNEOEPITOPESCLASS1 } from '../modules/local/mhcflurry_predictneoepitopesclass1' addParams( options: [:] ) +include { MHCNUGGETS_NEOEPITOPESCLASS2RE } from '../modules/local/mhcnuggets_neoepitopesclass2pre' addParams( options: [:] ) +include { MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 } from '../modules/local/mhcnuggets_predictneoepitopesclass2' addParams( options: [:] ) +include { MHCNUGGETS_NEOEPITOPESCLASS2POST } from '../modules/local/mhcnuggets_neoepitopesclass2post' addParams( options: [:] ) include { OPENMS_RTMODEL } from '../modules/local/openms_rtmodel' addParams( options: [:] ) include { OPENMS_RTPREDICT as OPENMS_RTPREDICT_FOUND_PEPTIDES} from '../modules/local/openms_rtpredict' addParams( options: [suffix:"_id_files_for_rt_prediction_RTpredicted"] ) include { OPENMS_RTPREDICT as OPENMS_RTPREDICT_NEOEPITOPES} from '../modules/local/openms_rtpredict' addParams( options: [suffix:"_txt_file_for_rt_prediction_RTpredicted"] ) include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/modules/custom/dumpsoftwareversions/main' addParams( options: [publish_files : ['_versions.yml':'']] ) -include { MULTIQC } from '../modules/nf-core/modules/multiqc/main' addParams( options: multiqc_options ) +include { MULTIQC } from 
'../modules/nf-core/modules/multiqc/main' addParams( options: multiqc_options ) -//////////////////////////////////////////////////// -/* -- CREATE CHANNELS -- */ -//////////////////////////////////////////////////// -// params.summary_params = [:] -include { SAMPLESHEET_CHECK } from '../modules/local/samplesheet_check' addParams( ) //////////////////////////////////////////////////// /* -- RUN MAIN WORKFLOW -- */ //////////////////////////////////////////////////// @@ -181,7 +174,6 @@ workflow MHCQUANT { other : true } .set { ms_files } - ch_versions = ch_versions.mix(INPUT_CHECK.out.versions) // Input fasta file Channel.fromPath( params.fasta ) @@ -259,6 +251,7 @@ workflow MHCQUANT { // Compute alignment rt transformatio OPENMS_MAPALIGNERIDENTIFICATION(ch_grouped_fdr_filtered) ch_versions = ch_versions.mix(OPENMS_MAPALIGNERIDENTIFICATION.out.versions.first().ifEmpty(null)) + // TODO: Why are there 5 versions printed? // Intermediate step to join RT transformation files with mzml and idxml channels ms_files.mzml .mix(OPENMS_THERMORAWFILEPARSER.out.mzml) @@ -369,28 +362,30 @@ workflow MHCQUANT { OPENMS_IDCONFLICTRESOLVER(OPENMS_FEATURELINKERUNLABELEDKD.out.consensusxml) ch_versions = ch_versions.mix(OPENMS_IDCONFLICTRESOLVER.out.versions.first().ifEmpty(null)) // Assign the outcome of the id conflict resolver as export content - export_content = OPENMS_IDCONFLICTRESOLVER.out.consensusxml - } else { - // Assign the outcome of the filter q value as export content - export_content = filter_q_value.map { it -> [it[1], it[2]] } + //OPENMS_IDCONFLICTRESOLVER.out.consensusxml + //} else { + // // Assign the outcome of the filter q value as export content + // export_content = filter_q_value.map { it -> [it[1], it[2]] } + // Export all information as text to csv + OPENMS_TEXTEXPORTER(OPENMS_IDCONFLICTRESOLVER.out.consensusxml) + ch_versions = ch_versions.mix(OPENMS_TEXTEXPORTER.out.versions.first().ifEmpty(null)) + // Export all information as mzTab + 
OPENMS_MZTABEXPORTER(OPENMS_IDCONFLICTRESOLVER.out.consensusxml) + ch_versions = ch_versions.mix(OPENMS_MZTABEXPORTER.out.versions.first().ifEmpty(null)) } - // Export all information as text to csv - OPENMS_TEXTEXPORTER(export_content) - // Export all information as mzTab - OPENMS_MZTABEXPORTER(export_content) ////////////////////////////////////////////////////////////////////////////////////////////// // TODO: Replacement of custom scripts with epytope ch_predicted_possible_neoepitopes = Channel.empty() if ( params.predict_class_1 & !params.skip_quantification ) { // If specified predict peptides using MHCFlurry - PREDICT_PEPTIDES_MHCFLURRY_CLASS_1( + MHCFLURRY_PREDICTPEPTIDESCLASS1( OPENMS_MZTABEXPORTER.out.mztab .map{ it -> [it[0].sample, it[0], it[1]] } .combine( peptides_class_1_alleles, by:0) .map( it -> [it[1], it[2], it[3]]) ) - ch_versions = ch_versions.mix(PREDICT_PEPTIDES_MHCFLURRY_CLASS_1.out.versions.first().ifEmpty(null)) + ch_versions = ch_versions.mix(MHCFLURRY_PREDICTPEPTIDESCLASS1.out.versions.first().ifEmpty(null)) if ( params.include_proteins_from_vcf ) { // Predict all possible neoepitopes from vcf PREDICT_POSSIBLE_NEOEPITOPES(peptides_class_1_alleles.join(ch_vcf_from_sheet, by:0, remainder:true)) @@ -405,27 +400,27 @@ workflow MHCQUANT { ) ch_versions = ch_versions.mix(RESOLVE_FOUND_NEOEPITOPES.out.versions.first().ifEmpty(null)) // Predict class 1 neoepitopes MHCFlurry - PREDICT_NEOEPITOPES_MHCFLURRY_CLASS_1(peptides_class_1_alleles.join(RESOLVE_FOUND_NEOEPITOPES.out.csv, by:0)) - ch_versions = ch_versions.mix(PREDICT_NEOEPITOPES_MHCFLURRY_CLASS_1.out.versions.first().ifEmpty(null)) + MHCFLURRY_PREDICTNEOEPITOPESCLASS1(peptides_class_1_alleles.join(RESOLVE_FOUND_NEOEPITOPES.out.csv, by:0)) + ch_versions = ch_versions.mix(MHCFLURRY_PREDICTNEOEPITOPESCLASS1.out.versions.first().ifEmpty(null)) } } ch_predicted_possible_neoepitopes_II = Channel.empty() if ( params.predict_class_2 & !params.skip_quantification ) { // Preprocess found peptides 
for MHCNuggets prediction class 2 - PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2(OPENMS_MZTABEXPORTER.out.mztab) - ch_versions = ch_versions.mix(PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) + MHCNUGGETS_PEPTIDESCLASS2PRE(OPENMS_MZTABEXPORTER.out.mztab) + ch_versions = ch_versions.mix(MHCNUGGETS_PEPTIDESCLASS2PRE.out.versions.first().ifEmpty(null)) // Predict found peptides using MHCNuggets class 2 - PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2( - PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2.out.preprocessed + MHCNUGGETS_PREDICTPEPTIDESCLASS2( + MHCNUGGETS_PEPTIDESCLASS2PRE.out.preprocessed .map{ it -> [it[0].sample, it[0], it[1]] } .join(peptides_class_2_alleles, by:0) .map( it -> [it[1], it[2], it[3]]) ) - ch_versions = ch_versions.mix(PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) + ch_versions = ch_versions.mix(MHCNUGGETS_PREDICTPEPTIDESCLASS2.out.versions.first().ifEmpty(null)) // Postprocess predicted MHCNuggets peptides class 2 - POSTPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2( PREDICT_PEPTIDES_MHCNUGGETS_CLASS_2.out.csv.join(PREPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2.out.geneID, by:0) ) - ch_versions = ch_versions.mix(POSTPROCESS_PEPTIDES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) + MHCNUGGETS_PEPTIDESCLASS2POST( MHCNUGGETS_PREDICTPEPTIDESCLASS2.out.csv.join(MHCNUGGETS_PEPTIDESCLASS2PRE.out.geneID, by:0) ) + ch_versions = ch_versions.mix(MHCNUGGETS_PEPTIDESCLASS2POST.out.versions.first().ifEmpty(null)) if ( params.include_proteins_from_vcf ) { // Predict all possible class 2 neoepitopes from vcf @@ -440,14 +435,14 @@ workflow MHCQUANT { ) ch_versions = ch_versions.mix(RESOLVE_FOUND_CLASS_2_NEOEPITOPES.out.versions.first().ifEmpty(null)) // Preprocess resolved neoepitopes in a format that MHCNuggets understands - PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2(RESOLVE_FOUND_CLASS_2_NEOEPITOPES.out.csv) - ch_versions = ch_versions.mix(PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) + 
MHCNUGGETS_NEOEPITOPESCLASS2RE(RESOLVE_FOUND_CLASS_2_NEOEPITOPES.out.csv) + ch_versions = ch_versions.mix(MHCNUGGETS_NEOEPITOPESCLASS2RE.out.versions.first().ifEmpty(null)) // Predict class 2 MHCNuggets - PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2(PREPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.preprocessed.join(peptides_class_2_alleles, by:0)) - ch_versions = ch_versions.mix(PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) + MHCNUGGETS_PREDICTNEOEPITOPESCLASS2(MHCNUGGETS_NEOEPITOPESCLASS2RE.out.preprocessed.join(peptides_class_2_alleles, by:0)) + ch_versions = ch_versions.mix(MHCNUGGETS_PREDICTNEOEPITOPESCLASS2.out.versions.first().ifEmpty(null)) // Class 2 MHCNuggets Postprocessing - POSTPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2(RESOLVE_FOUND_CLASS_2_NEOEPITOPES.out.csv.join(PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.csv, by:0)) - ch_versions = ch_versions.mix(POSTPROCESS_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.versions.first().ifEmpty(null)) + MHCNUGGETS_NEOEPITOPESCLASS2POST(RESOLVE_FOUND_CLASS_2_NEOEPITOPES.out.csv.join(PREDICT_NEOEPITOPES_MHCNUGGETS_CLASS_2.out.csv, by:0)) + ch_versions = ch_versions.mix(MHCNUGGETS_NEOEPITOPESCLASS2POST.out.versions.first().ifEmpty(null)) } } ////////////////////////////////////////////////////////////////////////////////////////////// @@ -467,35 +462,30 @@ workflow MHCQUANT { // // MODULE: Pipeline reporting // - ch_versions - .map { it -> if (it) [ it.baseName, it ] } - .groupTuple() - .map { it[1][0] } - .flatten() - .collect() - .set { ch_versions } - CUSTOM_DUMPSOFTWAREVERSIONS ( ch_versions.unique().collectFile() ) + // // MODULE: MultiQC // - workflow_summary = WorkflowMhcquant.paramsSummaryMultiqc(workflow, summary_params) - ch_workflow_summary = Channel.value(workflow_summary) - - ch_multiqc_files = Channel.empty() - ch_multiqc_files = ch_multiqc_files.mix(Channel.from(ch_multiqc_config)) - ch_multiqc_files = ch_multiqc_files.mix(ch_multiqc_custom_config.collect().ifEmpty([])) - ch_multiqc_files = 
ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) - ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) - // ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}.ifEmpty([])) - - MULTIQC ( - ch_multiqc_files.collect() - ) - multiqc_report = MULTIQC.out.report.toList() - ch_versions = ch_versions.mix(MULTIQC.out.version.ifEmpty(null)) + if (!params.skip_multiqc) { + workflow_summary = WorkflowMhcquant.paramsSummaryMultiqc(workflow, summary_params) + ch_workflow_summary = Channel.value(workflow_summary) + + ch_multiqc_files = Channel.empty() + ch_multiqc_files = ch_multiqc_files.mix(Channel.from(ch_multiqc_config)) + ch_multiqc_files = ch_multiqc_files.mix(ch_multiqc_custom_config.collect().ifEmpty([])) + ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) + ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) + + MULTIQC ( + ch_multiqc_files.collect() + ) + + multiqc_report = MULTIQC.out.report.toList() + } + } /* From 157120df7529d399b6350d68a9224b419643ea85 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 8 Nov 2021 09:43:55 +0100 Subject: [PATCH 019/227] Include the right version annotation for mhcflurry and remove the hardcoded version numbers --- README.md | 6 +-- modules.json | 8 +-- modules/local/generate_proteins_from_vcf.nf | 10 ++-- .../mhcflurry_predictneoepitopesclass1.nf | 2 +- .../local/mhcflurry_predictpeptidesclass1.nf | 7 +-- modules/local/mhcflurry_predictpsms.nf | 2 +- .../local/mhcnuggets_neoepitopesclass2post.nf | 2 - .../local/mhcnuggets_neoepitopesclass2pre.nf | 2 - .../local/mhcnuggets_peptidesclass2post.nf | 2 - modules/local/mhcnuggets_peptidesclass2pre.nf | 2 - .../mhcnuggets_predictneoepitopesclass2.nf | 2 - .../local/mhcnuggets_predictpeptidesclass2.nf | 5 +- .../predict_possible_class_2_neoepitopes.nf | 7 +-- 
modules/local/predict_possible_neoepitopes.nf | 7 +-- .../resolve_found_class_2_neoepitopes.nf | 7 +-- modules/local/resolve_found_neoepitopes.nf | 9 ++-- .../custom/dumpsoftwareversions/meta.yml | 2 +- modules/nf-core/modules/fastqc/functions.nf | 54 +++++++++++-------- modules/nf-core/modules/fastqc/main.nf | 19 ++++--- modules/nf-core/modules/fastqc/meta.yml | 7 +-- modules/nf-core/modules/multiqc/functions.nf | 54 +++++++++++-------- modules/nf-core/modules/multiqc/main.nf | 17 +++--- modules/nf-core/modules/multiqc/meta.yml | 7 +-- 23 files changed, 121 insertions(+), 119 deletions(-) diff --git a/README.md b/README.md index 7a7455d9..f2b0f2be 100644 --- a/README.md +++ b/README.md @@ -3,13 +3,9 @@ [![GitHub Actions CI Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/mhcquant/actions?query=workflow%3A%22nf-core+CI%22) [![GitHub Actions Linting Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/mhcquant/actions?query=workflow%3A%22nf-core+linting%22) [![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/mhcquant/results) -[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) - -[![GitHub Actions CI Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/mhcquant/actions) -[![GitHub Actions Linting Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/mhcquant/actions) [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.5407955-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.1400710) -[![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A521.04.0-brightgreen.svg)](https://www.nextflow.io/) 
+[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.04.0-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) diff --git a/modules.json b/modules.json index ee307765..a817ae90 100644 --- a/modules.json +++ b/modules.json @@ -4,14 +4,14 @@ "repos": { "nf-core/modules": { "custom/dumpsoftwareversions": { - "git_sha": "84f2302920078b0cf7716b2a2e5fcc0be5c4531d" + "git_sha": "3aacd46da2b221ed47aaa05c413a828538d2c2ae" }, "fastqc": { - "git_sha": "e937c7950af70930d1f34bb961403d9d2aa81c7d" + "git_sha": "3aacd46da2b221ed47aaa05c413a828538d2c2ae" }, "multiqc": { - "git_sha": "e937c7950af70930d1f34bb961403d9d2aa81c7d" + "git_sha": "3aacd46da2b221ed47aaa05c413a828538d2c2ae" } } } -} +} \ No newline at end of file diff --git a/modules/local/generate_proteins_from_vcf.nf b/modules/local/generate_proteins_from_vcf.nf index 6bf8f397..e63e45e4 100644 --- a/modules/local/generate_proteins_from_vcf.nf +++ b/modules/local/generate_proteins_from_vcf.nf @@ -34,9 +34,11 @@ process GENERATE_PROTEINS_FROM_VCF { """ variants2fasta.py -v ${vcf} -f ${fasta} -o ${meta.sample}_${prefix}.fasta $options.args - - echo $VERSIONFRED2 > fred2.version.txt - echo $VERSIONMHCNUGGETS > mhcnuggets.version.txt - echo \$(mhcflurry-predict --version 2>&1) | sed 's/^.*mhcflurry //; s/ .*\$//' &> mhcflurry.version.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) + mhcnuggets: \$(echo \$(python -c "import 
pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + END_VERSIONS """ } diff --git a/modules/local/mhcflurry_predictneoepitopesclass1.nf b/modules/local/mhcflurry_predictneoepitopesclass1.nf index 9f76d4cd..8b4520d5 100644 --- a/modules/local/mhcflurry_predictneoepitopesclass1.nf +++ b/modules/local/mhcflurry_predictneoepitopesclass1.nf @@ -36,7 +36,7 @@ process MHCFLURRY_PREDICTNEOEPITOPESCLASS1 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) END_VERSIONS """ } diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index 1c122662..d109e3df 100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -4,9 +4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -// def VERSIONFRED2 = '2.0.6' -// def VERSIONMHCNUGGETS = '2.3.2' - process MHCFLURRY_PREDICTPEPTIDESCLASS1 { tag "$meta" label 'process_low' @@ -38,9 +35,9 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - FRED2: \$(echo \$(python -c "import pkg_resources; print 'fred2 ' + pkg_resources.get_distribution('Fred2').version" | sed 's/^fred2 //; s/ .*\$//')) + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry 
//; s/ .*\$//') ) + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) END_VERSIONS """ } diff --git a/modules/local/mhcflurry_predictpsms.nf b/modules/local/mhcflurry_predictpsms.nf index bda370fe..42096e89 100644 --- a/modules/local/mhcflurry_predictpsms.nf +++ b/modules/local/mhcflurry_predictpsms.nf @@ -35,7 +35,7 @@ process MHCFLURRY_PREDICTPSMS { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) END_VERSIONS """ diff --git a/modules/local/mhcnuggets_neoepitopesclass2post.nf b/modules/local/mhcnuggets_neoepitopesclass2post.nf index 09f35777..c570c7a6 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2post.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2post.nf @@ -4,8 +4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -// def VERSION = '2.3.2' - process MHCNUGGETS_NEOEPITOPESCLASS2POST { tag "$meta" label 'process_low' diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index c6057248..0e2cab71 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -4,8 +4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSION = '2.3.2' - process MHCNUGGETS_NEOEPITOPESCLASS2RE { tag "$meta" label 'process_low' diff --git a/modules/local/mhcnuggets_peptidesclass2post.nf b/modules/local/mhcnuggets_peptidesclass2post.nf index d598ed36..5f8ebd96 100644 --- a/modules/local/mhcnuggets_peptidesclass2post.nf +++ b/modules/local/mhcnuggets_peptidesclass2post.nf @@ -4,8 
+4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -// def VERSION = '2.3.2' - process MHCNUGGETS_PEPTIDESCLASS2POST { tag "$meta" label 'process_low' diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index d3a18d01..c60263f2 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -4,8 +4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSION = '2.3.2' - process MHCNUGGETS_PEPTIDESCLASS2PRE { tag "$meta" label 'process_low' diff --git a/modules/local/mhcnuggets_predictneoepitopesclass2.nf b/modules/local/mhcnuggets_predictneoepitopesclass2.nf index be6d9180..ef201fb5 100644 --- a/modules/local/mhcnuggets_predictneoepitopesclass2.nf +++ b/modules/local/mhcnuggets_predictneoepitopesclass2.nf @@ -4,8 +4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -// def VERSION = '2.3.2' - process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { tag "$meta" label 'process_low' diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index a6bae12f..af8d0fa7 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -4,9 +4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -// def VERSIONFRED2 = '2.0.6' -// def VERSIONMHCNUGGETS = '2.3.2' - process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { tag "$meta" label 'process_low' @@ -34,7 +31,7 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c 
"import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - FRED2: \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//') + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) END_VERSIONS """ } diff --git a/modules/local/predict_possible_class_2_neoepitopes.nf b/modules/local/predict_possible_class_2_neoepitopes.nf index 97426d67..4011ebfc 100644 --- a/modules/local/predict_possible_class_2_neoepitopes.nf +++ b/modules/local/predict_possible_class_2_neoepitopes.nf @@ -4,9 +4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -// def VERSIONFRED2 = '2.0.6' -// def VERSIONMHCNUGGETS = '2.3.2' - process PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES { tag "$meta" label 'process_low' @@ -38,9 +35,9 @@ process PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - FRED2: \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//') + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) END_VERSIONS """ } diff --git a/modules/local/predict_possible_neoepitopes.nf b/modules/local/predict_possible_neoepitopes.nf index b2c85d2c..fe6461da 100644 --- 
a/modules/local/predict_possible_neoepitopes.nf +++ b/modules/local/predict_possible_neoepitopes.nf @@ -4,9 +4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSIONFRED2 = '2.0.6' -def VERSIONMHCNUGGETS = '2.3.2' - process PREDICT_POSSIBLE_NEOEPITOPES { tag "$meta" label 'process_low' @@ -38,9 +35,9 @@ process PREDICT_POSSIBLE_NEOEPITOPES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - FRED2: \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//') + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) END_VERSIONS """ } diff --git a/modules/local/resolve_found_class_2_neoepitopes.nf b/modules/local/resolve_found_class_2_neoepitopes.nf index 85a0dbcd..fa855187 100644 --- a/modules/local/resolve_found_class_2_neoepitopes.nf +++ b/modules/local/resolve_found_class_2_neoepitopes.nf @@ -4,9 +4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSIONFRED2 = '2.0.6' -def VERSIONMHCNUGGETS = '2.3.2' - process RESOLVE_FOUND_CLASS_2_NEOEPITOPES { tag "$meta" label 'process_low' @@ -39,9 +36,9 @@ process RESOLVE_FOUND_CLASS_2_NEOEPITOPES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 
| sed 's/^mhcflurry //; s/ .*\$//') ) mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - FRED2: \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//') + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) END_VERSIONS """ } diff --git a/modules/local/resolve_found_neoepitopes.nf b/modules/local/resolve_found_neoepitopes.nf index a13ed77a..bc8859c3 100644 --- a/modules/local/resolve_found_neoepitopes.nf +++ b/modules/local/resolve_found_neoepitopes.nf @@ -4,9 +4,6 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -def VERSIONFRED2 = '2.0.6' -def VERSIONMHCNUGGETS = '2.3.2' - process RESOLVE_FOUND_NEOEPITOPES { tag "$meta" label 'process_low' @@ -37,9 +34,9 @@ process RESOLVE_FOUND_NEOEPITOPES { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: - mhcflurry: \$(mhcflurry-predict --version | sed 's/^mhcflurry //; s/ .*\$//' ) - mhcnuggets: \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//') - FRED2: \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//') + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//')) + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) END_VERSIONS """ } diff --git 
a/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml b/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml index 8d4a6ed4..c8310e35 100644 --- a/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml +++ b/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml @@ -8,7 +8,7 @@ tools: description: Custom module used to dump software versions within the nf-core pipeline template homepage: https://github.com/nf-core/tools documentation: https://github.com/nf-core/tools - + licence: ['MIT'] input: - versions: type: file diff --git a/modules/nf-core/modules/fastqc/functions.nf b/modules/nf-core/modules/fastqc/functions.nf index da9da093..85628ee0 100644 --- a/modules/nf-core/modules/fastqc/functions.nf +++ b/modules/nf-core/modules/fastqc/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/nf-core/modules/fastqc/main.nf b/modules/nf-core/modules/fastqc/main.nf index 39c327b2..9f6cfc55 100644 --- a/modules/nf-core/modules/fastqc/main.nf +++ b/modules/nf-core/modules/fastqc/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -24,24 +24,31 @@ process FASTQC { output: 
tuple val(meta), path("*.html"), emit: html tuple val(meta), path("*.zip") , emit: zip - path "*.version.txt" , emit: version + path "versions.yml" , emit: versions script: // Add soft-links to original FastQs for consistent naming in pipeline - def software = getSoftwareName(task.process) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz fastqc $options.args --threads $task.cpus ${prefix}.fastq.gz - fastqc --version | sed -e "s/FastQC v//g" > ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) + END_VERSIONS """ } else { """ [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! -f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz fastqc $options.args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz - fastqc --version | sed -e "s/FastQC v//g" > ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) + END_VERSIONS """ } } diff --git a/modules/nf-core/modules/fastqc/meta.yml b/modules/nf-core/modules/fastqc/meta.yml index 8eb9953d..b09553a3 100644 --- a/modules/nf-core/modules/fastqc/meta.yml +++ b/modules/nf-core/modules/fastqc/meta.yml @@ -15,6 +15,7 @@ tools: overrepresented sequences. 
homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ + licence: ['GPL-2.0-only'] input: - meta: type: map @@ -40,10 +41,10 @@ output: type: file description: FastQC report archive pattern: "*_{fastqc.zip}" - - version: + - versions: type: file - description: File containing software version - pattern: "*.{version.txt}" + description: File containing software versions + pattern: "versions.yml" authors: - "@drpatelh" - "@grst" diff --git a/modules/nf-core/modules/multiqc/functions.nf b/modules/nf-core/modules/multiqc/functions.nf index da9da093..85628ee0 100644 --- a/modules/nf-core/modules/multiqc/functions.nf +++ b/modules/nf-core/modules/multiqc/functions.nf @@ -9,6 +9,13 @@ def getSoftwareName(task_process) { return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() } +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + // // Function to initialise default values and to generate a Groovy Map of available options for nf-core modules // @@ -37,32 +44,35 @@ def getPathFromList(path_list) { // Function to save/publish module results // def saveFiles(Map args) { - if (!args.filename.endsWith('.version.txt')) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] } + path = path instanceof String ? path : '' + path_list.add(path) } } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" } } diff --git a/modules/nf-core/modules/multiqc/main.nf b/modules/nf-core/modules/multiqc/main.nf index da780800..0861aa59 100644 --- a/modules/nf-core/modules/multiqc/main.nf +++ b/modules/nf-core/modules/multiqc/main.nf @@ -1,5 +1,5 @@ // Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' params.options = [:] options = initOptions(params.options) @@ -10,11 +10,11 @@ process MULTIQC { mode: 
params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::multiqc=1.10.1" : null) + conda (params.enable_conda ? 'bioconda::multiqc=1.11' : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/multiqc:1.10.1--py_0" + container "https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0" } else { - container "quay.io/biocontainers/multiqc:1.10.1--py_0" + container "quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0" } input: @@ -24,12 +24,15 @@ process MULTIQC { path "*multiqc_report.html", emit: report path "*_data" , emit: data path "*_plots" , optional:true, emit: plots - path "*.version.txt" , emit: version + path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) """ multiqc -f $options.args . - multiqc --version | sed -e "s/multiqc, version //g" > ${software}.version.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + END_VERSIONS """ } diff --git a/modules/nf-core/modules/multiqc/meta.yml b/modules/nf-core/modules/multiqc/meta.yml index 532a8bb1..63c75a45 100644 --- a/modules/nf-core/modules/multiqc/meta.yml +++ b/modules/nf-core/modules/multiqc/meta.yml @@ -11,6 +11,7 @@ tools: It's a general use tool, perfect for summarising the output from numerous bioinformatics tools. 
homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ + licence: ['GPL-3.0-or-later'] input: - multiqc_files: type: file @@ -29,10 +30,10 @@ output: type: file description: Plots created by MultiQC pattern: "*_data" - - version: + - versions: type: file - description: File containing software version - pattern: "*.{version.txt}" + description: File containing software versions + pattern: "versions.yml" authors: - "@abhi18av" - "@bunop" From f1e2cd595764b9ee05d2950be0e491ad76f5cf84 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 8 Nov 2021 09:47:49 +0100 Subject: [PATCH 020/227] Resolve markdown complications --- CHANGELOG.md | 1 - docs/usage.md | 92 +++++++++++++++++++++++---------------------------- 2 files changed, 42 insertions(+), 51 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index af0c27f6..77ac83b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,7 +24,6 @@ Note, since the pipeline is now using Nextflow DSL2, each process will be run wi | `openms-thirdparty` | 2.5.0 | 2.6.0 | | `thermorawfileparser` | 1.2.3 | 1.3.4 | - > **NB:** Dependency has been **updated** if both old and new version information is present. > **NB:** Dependency has been **added** if just the new version information is present. > **NB:** Dependency has been **removed** if version information isn't present. 
diff --git a/docs/usage.md b/docs/usage.md index 6b3f8605..28d4b773 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -2,21 +2,21 @@ ## Table of contents -* [Table of contents](#table-of-contents) -* [Samplesheet input](#samplesheet-input) - * [Multiple runs of the same sample](#multiple-runs-of-the-same-sample) - * [Full samplesheet](#full-samplesheet) -* [Running the pipeline](#running-the-pipeline) - * [Updating the pipeline](#updating-the-pipeline) - * [Reproducibility](#reproducibility) -* [Core Nextflow arguments](#core-nextflow-arguments) - * [`-profile`](#-profile) - * [`-resume`](#-resume) -* [Custom configuration](#custom-configuration) - * [Resource requests](#resource-requests) - * [nf-core/configs](#nf-core-configs) -* [Running in the background](#running-in-the-background) -* [Nextflow memory requirements](#nextflow-memory-requirements) +- [Table of contents](#table-of-contents) +- [Samplesheet input](#samplesheet-input) + - [Multiple runs of the same sample](#multiple-runs-of-the-same-sample) + - [Full samplesheet](#full-samplesheet) +- [Running the pipeline](#running-the-pipeline) + - [Updating the pipeline](#updating-the-pipeline) + - [Reproducibility](#reproducibility) +- [Core Nextflow arguments](#core-nextflow-arguments) + - [`-profile`](#-profile) + - [`-resume`](#-resume) +- [Custom configuration](#custom-configuration) + - [Resource requests](#resource-requests) + - [nf-core/configs](#nf-core-configs) +- [Running in the background](#running-in-the-background) +- [Nextflow memory requirements](#nextflow-memory-requirements) ## :warning: Please read this documentation on the nf-core website: [https://nf-co.re/mhcquant/usage](https://nf-co.re/mhcquant/usage) @@ -66,23 +66,15 @@ ID\tSample\tCondition\tReplicateFileName ``` -| Column | Description | 
-|----------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `ID` | An incrementing value which acts as a unique number for the given sample | -| `Sample` | Custom sample name. This entry will be identical for multiple MS runs from the same sample. Spaces in sample names are automatically converted to underscores (`_`). | -| `Condition` | Additional information of the sample can be defined here.| -| `ReplicateFileName` | Full path to the MS outcome file. These files have the extentions ".raw" or ".mzML" | +| Column | Description | +| ------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `ID` | An incrementing value which acts as a unique number for the given sample | +| `Sample` | Custom sample name. This entry will be identical for multiple MS runs from the same sample. Spaces in sample names are automatically converted to underscores (`_`). | +| `Condition` | Additional information of the sample can be defined here. | +| `ReplicateFileName` | Full path to the MS outcome file. These files have the extentions ".raw" or ".mzML" | An [example samplesheet](../assets/samplesheet.tsv) has been provided with the pipeline. -## Samplesheet input - -You will need to create a samplesheet with information about the samples you would like to analyse before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row as shown in the examples below. - -```console ---input '[path to samplesheet file]' -``` - ### Multiple runs of the same sample The `sample` identifiers have to be the same when you have re-sequenced the same sample more than once e.g. to increase sequencing depth. 
The pipeline will concatenate the raw reads before performing any downstream analysis. Below is an example for the same sample sequenced across 3 lanes: @@ -111,11 +103,11 @@ TREATMENT_REP3,AEG588A6_S6_L003_R1_001.fastq.gz, TREATMENT_REP3,AEG588A6_S6_L004_R1_001.fastq.gz, ``` -| Column | Description | -|----------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `sample` | Custom sample name. This entry will be identical for multiple sequencing libraries/runs from the same sample. Spaces in sample names are automatically converted to underscores (`_`). | -| `fastq_1` | Full path to FastQ file for Illumina short reads 1. File has to be gzipped and have the extension ".fastq.gz" or ".fq.gz". | -| `fastq_2` | Full path to FastQ file for Illumina short reads 2. File has to be gzipped and have the extension ".fastq.gz" or ".fq.gz". | +| Column | Description | +| --------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `sample` | Custom sample name. This entry will be identical for multiple sequencing libraries/runs from the same sample. Spaces in sample names are automatically converted to underscores (`_`). | +| `fastq_1` | Full path to FastQ file for Illumina short reads 1. File has to be gzipped and have the extension ".fastq.gz" or ".fq.gz". | +| `fastq_2` | Full path to FastQ file for Illumina short reads 2. File has to be gzipped and have the extension ".fastq.gz" or ".fq.gz". | An [example samplesheet](../assets/samplesheet.csv) has been provided with the pipeline. @@ -173,21 +165,21 @@ They are loaded in sequence, so later profiles can overwrite earlier profiles. 
If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended. -* `docker` - * A generic configuration profile to be used with [Docker](https://docker.com/) -* `singularity` - * A generic configuration profile to be used with [Singularity](https://sylabs.io/docs/) -* `podman` - * A generic configuration profile to be used with [Podman](https://podman.io/) -* `shifter` - * A generic configuration profile to be used with [Shifter](https://nersc.gitlab.io/development/shifter/how-to-use/) -* `charliecloud` - * A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) -* `conda` - * A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud. -* `test` - * A profile with a complete configuration for automated testing - * Includes links to test data so needs no other parameters +- `docker` + - A generic configuration profile to be used with [Docker](https://docker.com/) +- `singularity` + - A generic configuration profile to be used with [Singularity](https://sylabs.io/docs/) +- `podman` + - A generic configuration profile to be used with [Podman](https://podman.io/) +- `shifter` + - A generic configuration profile to be used with [Shifter](https://nersc.gitlab.io/development/shifter/how-to-use/) +- `charliecloud` + - A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) +- `conda` + - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud. 
+- `test` + - A profile with a complete configuration for automated testing + - Includes links to test data so needs no other parameters ### `-resume` @@ -234,7 +226,7 @@ Work dir: Tip: you can replicate the issue by changing to the process work dir and entering the command `bash .command.run` ``` -To bypass this error you would need to find exactly which resources are set by the `STAR_ALIGN` process. The quickest way is to search for `process STAR_ALIGN` in the [nf-core/rnaseq Github repo](https://github.com/nf-core/rnaseq/search?q=process+STAR_ALIGN). We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so based on the search results the file we want is `modules/nf-core/software/star/align/main.nf`. If you click on the link to that file you will notice that there is a `label` directive at the top of the module that is set to [`label process_high`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L9). The [Nextflow `label`](https://www.nextflow.io/docs/latest/process.html#label) directive allows us to organise workflow processes in separate groups which can be referenced in a configuration file to select and configure subset of processes having similar computing requirements. The default values for the `process_high` label are set in the pipeline's [`base.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L33-L37) which in this case is defined as 72GB. Providing you haven't set any other standard nf-core parameters to __cap__ the [maximum resources](https://nf-co.re/usage/configuration#max-resources) used by the pipeline then we can try and bypass the `STAR_ALIGN` process failure by creating a custom config file that sets at least 72GB of memory, in this case increased to 100GB. 
The custom config below can then be provided to the pipeline via the [`-c`](#-c) parameter as highlighted in previous sections. +To bypass this error you would need to find exactly which resources are set by the `STAR_ALIGN` process. The quickest way is to search for `process STAR_ALIGN` in the [nf-core/rnaseq Github repo](https://github.com/nf-core/rnaseq/search?q=process+STAR_ALIGN). We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so based on the search results the file we want is `modules/nf-core/software/star/align/main.nf`. If you click on the link to that file you will notice that there is a `label` directive at the top of the module that is set to [`label process_high`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L9). The [Nextflow `label`](https://www.nextflow.io/docs/latest/process.html#label) directive allows us to organise workflow processes in separate groups which can be referenced in a configuration file to select and configure subset of processes having similar computing requirements. The default values for the `process_high` label are set in the pipeline's [`base.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L33-L37) which in this case is defined as 72GB. Providing you haven't set any other standard nf-core parameters to **cap** the [maximum resources](https://nf-co.re/usage/configuration#max-resources) used by the pipeline then we can try and bypass the `STAR_ALIGN` process failure by creating a custom config file that sets at least 72GB of memory, in this case increased to 100GB. The custom config below can then be provided to the pipeline via the [`-c`](#-c) parameter as highlighted in previous sections. 
```nextflow process { From e41fccf9bbaadbaf3eb6090344159e8bf352ab6d Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 8 Nov 2021 10:01:47 +0100 Subject: [PATCH 021/227] Resolve markdown complications --- docs/usage.md | 88 ++++++++++++++++++--------------------------------- 1 file changed, 30 insertions(+), 58 deletions(-) diff --git a/docs/usage.md b/docs/usage.md index 28d4b773..12e88d85 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -2,21 +2,21 @@ ## Table of contents -- [Table of contents](#table-of-contents) -- [Samplesheet input](#samplesheet-input) - - [Multiple runs of the same sample](#multiple-runs-of-the-same-sample) - - [Full samplesheet](#full-samplesheet) -- [Running the pipeline](#running-the-pipeline) - - [Updating the pipeline](#updating-the-pipeline) - - [Reproducibility](#reproducibility) -- [Core Nextflow arguments](#core-nextflow-arguments) - - [`-profile`](#-profile) - - [`-resume`](#-resume) -- [Custom configuration](#custom-configuration) - - [Resource requests](#resource-requests) - - [nf-core/configs](#nf-core-configs) -- [Running in the background](#running-in-the-background) -- [Nextflow memory requirements](#nextflow-memory-requirements) +- [Table of contents](#table-of-contents) +- [Samplesheet input](#samplesheet-input) + - [Multiple runs of the same sample](#multiple-runs-of-the-same-sample) + - [Full samplesheet](#full-samplesheet) +- [Running the pipeline](#running-the-pipeline) + - [Updating the pipeline](#updating-the-pipeline) + - [Reproducibility](#reproducibility) +- [Core Nextflow arguments](#core-nextflow-arguments) + - [`-profile`](#-profile) + - [`-resume`](#-resume) +- [Custom configuration](#custom-configuration) + - [Resource requests](#resource-requests) + - [nf-core/configs](#nf-core-configs) +- [Running in the background](#running-in-the-background) +- [Nextflow memory requirements](#nextflow-memory-requirements) ## :warning: Please read this documentation on the nf-core website: 
[https://nf-co.re/mhcquant/usage](https://nf-co.re/mhcquant/usage) @@ -75,34 +75,6 @@ ID\tSample\tCondition\tReplicateFileName An [example samplesheet](../assets/samplesheet.tsv) has been provided with the pipeline. -### Multiple runs of the same sample - -The `sample` identifiers have to be the same when you have re-sequenced the same sample more than once e.g. to increase sequencing depth. The pipeline will concatenate the raw reads before performing any downstream analysis. Below is an example for the same sample sequenced across 3 lanes: - -```console -sample,fastq_1,fastq_2 -CONTROL_REP1,AEG588A1_S1_L002_R1_001.fastq.gz,AEG588A1_S1_L002_R2_001.fastq.gz -CONTROL_REP1,AEG588A1_S1_L003_R1_001.fastq.gz,AEG588A1_S1_L003_R2_001.fastq.gz -CONTROL_REP1,AEG588A1_S1_L004_R1_001.fastq.gz,AEG588A1_S1_L004_R2_001.fastq.gz -``` - -### Full samplesheet - -The pipeline will auto-detect whether a sample is single- or paired-end using the information provided in the samplesheet. The samplesheet can have as many columns as you desire, however, there is a strict requirement for the first 3 columns to match those defined in the table below. - -A final samplesheet file consisting of both single- and paired-end data may look something like the one below. This is for 6 samples, where `TREATMENT_REP3` has been sequenced twice. 
- -```console -sample,fastq_1,fastq_2 -CONTROL_REP1,AEG588A1_S1_L002_R1_001.fastq.gz,AEG588A1_S1_L002_R2_001.fastq.gz -CONTROL_REP2,AEG588A2_S2_L002_R1_001.fastq.gz,AEG588A2_S2_L002_R2_001.fastq.gz -CONTROL_REP3,AEG588A3_S3_L002_R1_001.fastq.gz,AEG588A3_S3_L002_R2_001.fastq.gz -TREATMENT_REP1,AEG588A4_S4_L003_R1_001.fastq.gz, -TREATMENT_REP2,AEG588A5_S5_L003_R1_001.fastq.gz, -TREATMENT_REP3,AEG588A6_S6_L003_R1_001.fastq.gz, -TREATMENT_REP3,AEG588A6_S6_L004_R1_001.fastq.gz, -``` - | Column | Description | | --------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `sample` | Custom sample name. This entry will be identical for multiple sequencing libraries/runs from the same sample. Spaces in sample names are automatically converted to underscores (`_`). | @@ -165,21 +137,21 @@ They are loaded in sequence, so later profiles can overwrite earlier profiles. If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended. -- `docker` - - A generic configuration profile to be used with [Docker](https://docker.com/) -- `singularity` - - A generic configuration profile to be used with [Singularity](https://sylabs.io/docs/) -- `podman` - - A generic configuration profile to be used with [Podman](https://podman.io/) -- `shifter` - - A generic configuration profile to be used with [Shifter](https://nersc.gitlab.io/development/shifter/how-to-use/) -- `charliecloud` - - A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) -- `conda` - - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud. 
-- `test` - - A profile with a complete configuration for automated testing - - Includes links to test data so needs no other parameters +- `docker` + - A generic configuration profile to be used with [Docker](https://docker.com/) +- `singularity` + - A generic configuration profile to be used with [Singularity](https://sylabs.io/docs/) +- `podman` + - A generic configuration profile to be used with [Podman](https://podman.io/) +- `shifter` + - A generic configuration profile to be used with [Shifter](https://nersc.gitlab.io/development/shifter/how-to-use/) +- `charliecloud` + - A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) +- `conda` + - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud. +- `test` + - A profile with a complete configuration for automated testing + - Includes links to test data so needs no other parameters ### `-resume` From c46bdbf16e6e2d1f85593e647780f7c9203bf370 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 8 Nov 2021 10:08:58 +0100 Subject: [PATCH 022/227] Remove duplicate informaiton from the usage.md --- docs/usage.md | 9 --------- 1 file changed, 9 deletions(-) diff --git a/docs/usage.md b/docs/usage.md index 12e88d85..db3772cc 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -74,15 +74,6 @@ ID\tSample\tCondition\tReplicateFileName | `ReplicateFileName` | Full path to the MS outcome file. These files have the extentions ".raw" or ".mzML" | An [example samplesheet](../assets/samplesheet.tsv) has been provided with the pipeline. - -| Column | Description | -| --------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `sample` | Custom sample name. 
This entry will be identical for multiple sequencing libraries/runs from the same sample. Spaces in sample names are automatically converted to underscores (`_`). | -| `fastq_1` | Full path to FastQ file for Illumina short reads 1. File has to be gzipped and have the extension ".fastq.gz" or ".fq.gz". | -| `fastq_2` | Full path to FastQ file for Illumina short reads 2. File has to be gzipped and have the extension ".fastq.gz" or ".fq.gz". | - -An [example samplesheet](../assets/samplesheet.csv) has been provided with the pipeline. - ## Running the pipeline The typical command for running the pipeline is as follows: From c0c71d0ba58690c832ea62b7458d11d1817172c8 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Mon, 8 Nov 2021 10:11:15 +0100 Subject: [PATCH 023/227] Remove duplicate information from the usage.md --- docs/usage.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/usage.md b/docs/usage.md index db3772cc..879ed17d 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -74,6 +74,7 @@ ID\tSample\tCondition\tReplicateFileName | `ReplicateFileName` | Full path to the MS outcome file. These files have the extentions ".raw" or ".mzML" | An [example samplesheet](../assets/samplesheet.tsv) has been provided with the pipeline.
+ ## Running the pipeline The typical command for running the pipeline is as follows: From ae0e927a41b9e4c95174897c98017a35688a8d79 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Wed, 24 Nov 2021 16:43:37 +0100 Subject: [PATCH 024/227] Update CITATIONS.md Co-authored-by: Gisela Gabernet --- CITATIONS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CITATIONS.md b/CITATIONS.md index 71b78eef..521a4e0d 100644 --- a/CITATIONS.md +++ b/CITATIONS.md @@ -21,6 +21,7 @@ * [OpenMS](https://pubmed.ncbi.nlm.nih.gov/27575624/) > Röst H, Sachsenberg T, Aiche S, Bielow C, Weisser H, Aicheler F, Andreotti S, Ehrlich HC, Gutenbrunner P, Kenar E, Liang X, Nahnsen S, Nilse L, Pfeuffer J, Rosenberger G, Rurik M, Schmitt U, Veit J, Walzer M, Wojnar D, Wolski WE, Schilling O, Choudhary JS, Malmström L, Aebersold R, Reinert K, Kohlbacher O. OpenMS: a flexible open-source software platform for mass spectrometry data analysis. Nat Methods 13, 741–748 (2016). doi: 10.1038/nmeth.3959. PubMed PMID: 27575624 + * [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) * [MultiQC](https://www.ncbi.nlm.nih.gov/pubmed/27312411/) From 48c585d5f616550245f719fe52c5dd5fd354f2e9 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Wed, 24 Nov 2021 16:43:54 +0100 Subject: [PATCH 025/227] Update README.md Co-authored-by: Gisela Gabernet --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f2b0f2be..034cc0ee 100644 --- a/README.md +++ b/README.md @@ -38,7 +38,7 @@ On release, automated continuous integration tests run the pipeline on a full-si 1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.04.0`) -2. 
Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_. Note: This pipeline does not currently support running with Conda on macOS if the `--remove_ribo_rna` parameter is used because the latest version of the SortMeRNA package is not available for this platform. +2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_. 3. 
Download the pipeline and test it on a minimal dataset with a single command: From c3ddd8bddc75ff70284a1adcbbce26cff6e0d714 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Wed, 24 Nov 2021 16:45:14 +0100 Subject: [PATCH 026/227] Update modules/local/openms_thermorawfileparser.nf Co-authored-by: Gisela Gabernet --- modules/local/openms_thermorawfileparser.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/local/openms_thermorawfileparser.nf b/modules/local/openms_thermorawfileparser.nf index 9ca958f1..85318312 100644 --- a/modules/local/openms_thermorawfileparser.nf +++ b/modules/local/openms_thermorawfileparser.nf @@ -30,7 +30,6 @@ process OPENMS_THERMORAWFILEPARSER { ThermoRawFileParser.sh -i=${rawfile} \\ -f=2 \\ -b=${prefix}.mzML - > ThermoRawFileParser.version.txt cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: From 393451a74e52bdd400f7b1177d2b138cc86747ff Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Wed, 24 Nov 2021 16:45:52 +0100 Subject: [PATCH 027/227] Update nextflow.config Co-authored-by: Gisela Gabernet --- nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nextflow.config b/nextflow.config index 138c3a52..cb09dc50 100644 --- a/nextflow.config +++ b/nextflow.config @@ -211,7 +211,7 @@ dag { manifest { name = 'nf-core/mhcquant' - author = 'Leon Bichmann' + author = 'Leon Bichmann, Marissa Dubbelaar' homePage = 'https://github.com/nf-core/mhcquant' description = 'Identify and quantify peptides from mass spectrometry raw data' mainScript = 'main.nf' From c69e30590a69b1b5d2cab10ef283b7a9e8e98bca Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Thu, 25 Nov 2021 09:23:37 +0100 Subject: [PATCH 028/227] Fix issue with zenodo link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 034cc0ee..20157511 100644 --- a/README.md
+++ b/README.md @@ -3,7 +3,7 @@ [![GitHub Actions CI Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/mhcquant/actions?query=workflow%3A%22nf-core+CI%22) [![GitHub Actions Linting Status](https://github.com/nf-core/mhcquant/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/mhcquant/actions?query=workflow%3A%22nf-core+linting%22) [![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/mhcquant/results) -[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.5407955-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.1400710) +[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.5407955-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.5407955) [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.04.0-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) From aa52cc074d85934d6d573c4a5ca8582c7dd3bcd9 Mon Sep 17 00:00:00 2001 From: marissaDubbelaar Date: Thu, 25 Nov 2021 09:38:38 +0100 Subject: [PATCH 029/227] fix linting problems with the license --- LICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index dcab23b0..43eea689 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) Leon Bichmann +Copyright (c) Leon Bichmann, Marissa Dubbelaar Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal From ccd2881eb17dba1b65c4c63574b230f112a392de Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Thu, 25 Nov 2021 17:12:54 +0100 Subject: [PATCH 030/227] Update mhcquant.nf --- workflows/mhcquant.nf | 7 +------ 1 file 
changed, 1 insertion(+), 6 deletions(-) diff --git a/workflows/mhcquant.nf b/workflows/mhcquant.nf index de46bdd0..a2ce1035 100644 --- a/workflows/mhcquant.nf +++ b/workflows/mhcquant.nf @@ -193,7 +193,7 @@ workflow MHCQUANT { .map(it -> [it[1], it[2], it[3]]) // If specified translate variants to proteins and include in reference fasta GENERATE_PROTEINS_FROM_VCF( ch_vcf ) - // ch_versions = ch_versions.mix(GENERATE_PROTEINS_FROM_VCF.out.versions.first().ifEmpty(null)) + ch_versions = ch_versions.mix(GENERATE_PROTEINS_FROM_VCF.out.versions.first().ifEmpty(null)) ch_fasta_file = GENERATE_PROTEINS_FROM_VCF.out.vcf_fasta } else { ch_fasta_file = input_fasta @@ -361,11 +361,6 @@ workflow MHCQUANT { // Resolve conflicting ids matching to the same feature OPENMS_IDCONFLICTRESOLVER(OPENMS_FEATURELINKERUNLABELEDKD.out.consensusxml) ch_versions = ch_versions.mix(OPENMS_IDCONFLICTRESOLVER.out.versions.first().ifEmpty(null)) - // Assign the outcome of the id conflict resolver as export content - //OPENMS_IDCONFLICTRESOLVER.out.consensusxml - //} else { - // // Assign the outcome of the filter q value as export content - // export_content = filter_q_value.map { it -> [it[1], it[2]] } // Export all information as text to csv OPENMS_TEXTEXPORTER(OPENMS_IDCONFLICTRESOLVER.out.consensusxml) ch_versions = ch_versions.mix(OPENMS_TEXTEXPORTER.out.versions.first().ifEmpty(null)) From 81265a88cc8b782eebb293e1398ac2bd7d3436b2 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Thu, 25 Nov 2021 17:14:27 +0100 Subject: [PATCH 031/227] Update CITATIONS.md --- CITATIONS.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/CITATIONS.md b/CITATIONS.md index 521a4e0d..68d4abe9 100644 --- a/CITATIONS.md +++ b/CITATIONS.md @@ -22,8 +22,6 @@ * [OpenMS](https://pubmed.ncbi.nlm.nih.gov/27575624/) > Röst H, Sachsenberg T, Aiche S, Bielow C, Weisser H, Aicheler F, Andreotti S, Ehrlich HC, Gutenbrunner P, Kenar E, Liang X, Nahnsen S, Nilse L, 
Pfeuffer J, Rosenberger G, Rurik M, Schmitt U, Veit J, Walzer M, Wojnar D, Wolski WE, Schilling O, Choudhary JS, Malmström L, Aebersold R, Reinert K, Kohlbacher O. OpenMS: a flexible open-source software platform for mass spectrometry data analysis. Nat Methods 13, 741–748 (2016). doi: 10.1038/nmeth.3959. PubMed PMID: 27575624 -* [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) - * [MultiQC](https://www.ncbi.nlm.nih.gov/pubmed/27312411/) > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. From 53d76cd8aeabe6dea6ae1592772c9c5d25f1ddcc Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Thu, 25 Nov 2021 17:15:21 +0100 Subject: [PATCH 032/227] Update README.md --- README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index 20157511..d7698ca8 100644 --- a/README.md +++ b/README.md @@ -28,8 +28,7 @@ On release, automated continuous integration tests run the pipeline on a full-si ## Pipeline summary -1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)) -2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)) +1. 
Present QC for raw reads ([`MultiQC`](http://multiqc.info/)) ![overview](assets/MHCquant_scheme.png) (This chart was created with the help of [Lucidchart](https://www.lucidchart.com)) From bbbf327b0ca20bac462bcc33b1f644010042acd3 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 13:51:05 +0100 Subject: [PATCH 033/227] Update lib/WorkflowMain.groovy Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- lib/WorkflowMain.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index c7fdc5b4..458879a6 100644 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -10,7 +10,7 @@ class WorkflowMain { public static String citation(workflow) { return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + - " https://doi.org/10.5281/zenodo.5407955\n\n" + + " https://doi.org/10.5281/zenodo.1569909\n\n" + "* The nf-core framework\n" + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + "* Software dependencies\n" + From b223aafbdcf1028fed9d3a7bdf767dea04f84161 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 13:52:14 +0100 Subject: [PATCH 034/227] Update nextflow.config remove commented igenomes related information --- nextflow.config | 2 -- 1 file changed, 2 deletions(-) diff --git a/nextflow.config b/nextflow.config index cb09dc50..99379ce1 100644 --- a/nextflow.config +++ b/nextflow.config @@ -18,8 +18,6 @@ params { // References genomes = null - //igenomes_base = 's3://ngi-igenomes/igenomes' - //igenomes_ignore = true // Workflow options allele_sheet = false From ccdecd1b29e018305ceec9cc17112f8c1515b6df Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 13:52:34 +0100 Subject: [PATCH 035/227] Update 
lib/WorkflowMain.groovy Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- lib/WorkflowMain.groovy | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 458879a6..d12756c7 100644 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -9,6 +9,8 @@ class WorkflowMain { // public static String citation(workflow) { return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + + "* The pipeline publication\n" + + " https://doi.org/10.1021/acs.jproteome.9b00313\n\n" + "* The pipeline\n" + " https://doi.org/10.5281/zenodo.1569909\n\n" + "* The nf-core framework\n" + From 2e0c2ba13a51012a0617f76566f88fba6ea3f67c Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 13:53:42 +0100 Subject: [PATCH 036/227] Delete igenomes.config No used in the process --- conf/igenomes.config | 432 ------------------------------------------- 1 file changed, 432 deletions(-) delete mode 100644 conf/igenomes.config diff --git a/conf/igenomes.config b/conf/igenomes.config deleted file mode 100644 index 855948de..00000000 --- a/conf/igenomes.config +++ /dev/null @@ -1,432 +0,0 @@ -/* -======================================================================================== - Nextflow config file for iGenomes paths -======================================================================================== - Defines reference genomes using iGenome paths. 
- Can be used by any config that customises the base path using: - $params.igenomes_base / --igenomes_base ----------------------------------------------------------------------------------------- -*/ - -params { - // illumina iGenomes reference file paths - genomes { - 'GRCh37' { - fasta = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/README.txt" - mito_name = "MT" - macs_gsize = "2.7e9" - blacklist = "${projectDir}/assets/blacklists/GRCh37-blacklist.bed" - } - 'GRCh38' { - fasta = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Annotation/Genes/genes.bed" - mito_name = "chrM" - macs_gsize = "2.7e9" - blacklist = "${projectDir}/assets/blacklists/hg38-blacklist.bed" - } - 'GRCm38' { - fasta = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa" - bwa = 
"${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/README.txt" - mito_name = "MT" - macs_gsize = "1.87e9" - blacklist = "${projectDir}/assets/blacklists/GRCm38-blacklist.bed" - } - 'TAIR10' { - fasta = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/README.txt" - mito_name = "Mt" - } - 'EB2' { - fasta = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/STARIndex/" - bismark = 
"${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/README.txt" - } - 'UMD3.1' { - fasta = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/README.txt" - mito_name = "MT" - } - 'WBcel235' { - fasta = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.bed" - mito_name = "MtDNA" - macs_gsize = "9e7" - } - 'CanFam3.1' { - fasta = 
"${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/README.txt" - mito_name = "MT" - } - 'GRCz10' { - fasta = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.bed" - mito_name = "MT" - } - 'BDGP6' { - fasta = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BismarkIndex/" - gtf = 
"${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.bed" - mito_name = "M" - macs_gsize = "1.2e8" - } - 'EquCab2' { - fasta = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/README.txt" - mito_name = "MT" - } - 'EB1' { - fasta = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/README.txt" - } - 'Galgal4' { - fasta = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/WholeGenomeFasta/genome.fa" - bwa = 
"${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.bed" - mito_name = "MT" - } - 'Gm01' { - fasta = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/README.txt" - } - 'Mmul_1' { - fasta = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.bed" - readme = 
"${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/README.txt" - mito_name = "MT" - } - 'IRGSP-1.0' { - fasta = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.bed" - mito_name = "Mt" - } - 'CHIMP2.1.4' { - fasta = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/README.txt" - mito_name = "MT" - } - 'Rnor_5.0' { - fasta = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Sequence/Bowtie2Index/" - star = 
"${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_5.0/Annotation/Genes/genes.bed" - mito_name = "MT" - } - 'Rnor_6.0' { - fasta = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.bed" - mito_name = "MT" - } - 'R64-1-1' { - fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.bed" - mito_name = "MT" - macs_gsize = "1.2e7" - } - 'EF2' { - fasta = 
"${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/README.txt" - mito_name = "MT" - macs_gsize = "1.21e7" - } - 'Sbi1' { - fasta = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/README.txt" - } - 'Sscrofa10.2' { - fasta = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/STARIndex/" - bismark = 
"${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/README.txt" - mito_name = "MT" - } - 'AGPv3' { - fasta = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.bed" - mito_name = "Mt" - } - 'hg38' { - fasta = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg38/Annotation/Genes/genes.bed" - mito_name = "chrM" - macs_gsize = "2.7e9" - blacklist = "${projectDir}/assets/blacklists/hg38-blacklist.bed" - } - 'hg19' { - fasta = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/BWAIndex/genome.fa" - bowtie2 = 
"${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Homo_sapiens/UCSC/hg19/Annotation/README.txt" - mito_name = "chrM" - macs_gsize = "2.7e9" - blacklist = "${projectDir}/assets/blacklists/hg19-blacklist.bed" - } - 'mm10' { - fasta = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Mus_musculus/UCSC/mm10/Annotation/README.txt" - mito_name = "chrM" - macs_gsize = "1.87e9" - blacklist = "${projectDir}/assets/blacklists/mm10-blacklist.bed" - } - 'bosTau8' { - fasta = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Annotation/Genes/genes.gtf" - bed12 = 
"${params.igenomes_base}/Bos_taurus/UCSC/bosTau8/Annotation/Genes/genes.bed" - mito_name = "chrM" - } - 'ce10' { - fasta = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Caenorhabditis_elegans/UCSC/ce10/Annotation/README.txt" - mito_name = "chrM" - macs_gsize = "9e7" - } - 'canFam3' { - fasta = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Canis_familiaris/UCSC/canFam3/Annotation/README.txt" - mito_name = "chrM" - } - 'danRer10' { - fasta = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/Bowtie2Index/" - star = 
"${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Danio_rerio/UCSC/danRer10/Annotation/Genes/genes.bed" - mito_name = "chrM" - macs_gsize = "1.37e9" - } - 'dm6' { - fasta = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Drosophila_melanogaster/UCSC/dm6/Annotation/Genes/genes.bed" - mito_name = "chrM" - macs_gsize = "1.2e8" - } - 'equCab2' { - fasta = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Equus_caballus/UCSC/equCab2/Annotation/README.txt" - mito_name = "chrM" - } - 'galGal4' { - fasta = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/WholeGenomeFasta/genome.fa" - bwa = 
"${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Gallus_gallus/UCSC/galGal4/Annotation/README.txt" - mito_name = "chrM" - } - 'panTro4' { - fasta = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Pan_troglodytes/UCSC/panTro4/Annotation/README.txt" - mito_name = "chrM" - } - 'rn6' { - fasta = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Annotation/Genes/genes.gtf" - bed12 = 
"${params.igenomes_base}/Rattus_norvegicus/UCSC/rn6/Annotation/Genes/genes.bed" - mito_name = "chrM" - } - 'sacCer3' { - fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Sequence/BismarkIndex/" - readme = "${params.igenomes_base}/Saccharomyces_cerevisiae/UCSC/sacCer3/Annotation/README.txt" - mito_name = "chrM" - macs_gsize = "1.2e7" - } - 'susScr3' { - fasta = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/WholeGenomeFasta/genome.fa" - bwa = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/BWAIndex/genome.fa" - bowtie2 = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/Bowtie2Index/" - star = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/STARIndex/" - bismark = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Sequence/BismarkIndex/" - gtf = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/Genes/genes.gtf" - bed12 = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/Genes/genes.bed" - readme = "${params.igenomes_base}/Sus_scrofa/UCSC/susScr3/Annotation/README.txt" - mito_name = "chrM" - } - } -} From bf689e24cafa07d7d505ced6c4bc22150fbd6025 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 13:55:21 +0100 Subject: [PATCH 037/227] Update mhcflurry_predictpeptidesclass1.nf --- modules/local/mhcflurry_predictpeptidesclass1.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index d109e3df..395ee964 
100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -24,7 +24,7 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { output: tuple val(meta), path("*predicted_peptides_class_1.csv"), emit: csv - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}_${options.suffix}" : "${meta.id}_predicted_peptides_class_1" From e222908724ced2ec02e4663c4fa6c9b70180f718 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 13:56:12 +0100 Subject: [PATCH 038/227] Update mhcflurry_predictpeptidesclass1.nf --- modules/local/mhcflurry_predictpeptidesclass1.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index 395ee964..4a9aa3c0 100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -23,8 +23,8 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { tuple val(meta), path(mztab), val(alleles) output: - tuple val(meta), path("*predicted_peptides_class_1.csv"), emit: csv - path "versions.yml" , emit: versions + tuple val(meta), path("*.csv"), emit: csv + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta.id}_${options.suffix}" : "${meta.id}_predicted_peptides_class_1" From 6a13e82b08c5dc169288710167e8ada4fc6a1b87 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 13:59:28 +0100 Subject: [PATCH 039/227] Update mhcflurry_predictpeptidesclass1.nf --- modules/local/mhcflurry_predictpeptidesclass1.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index 4a9aa3c0..5507de09 100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -23,8 +23,8 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { tuple val(meta), path(mztab), val(alleles) output: - tuple val(meta), path("*.csv"), emit: csv - path "versions.yml" , emit: versions + tuple val(meta), path("${prefix}.csv") , emit: csv + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}_${options.suffix}" : "${meta.id}_predicted_peptides_class_1" From b8cf10279a647c8ce4ef704b9ca8a0d2668a8011 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 14:05:49 +0100 Subject: [PATCH 040/227] Update generate_proteins_from_vcf.nf --- modules/local/generate_proteins_from_vcf.nf | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/modules/local/generate_proteins_from_vcf.nf b/modules/local/generate_proteins_from_vcf.nf index e63e45e4..bc90b50c 100644 --- a/modules/local/generate_proteins_from_vcf.nf +++ b/modules/local/generate_proteins_from_vcf.nf @@ -32,13 +32,13 @@ process GENERATE_PROTEINS_FROM_VCF { script: def prefix = options.suffix ? 
"${fasta.baseName}_${options.suffix}" : "${fasta.baseName}_added_vcf" - """ - variants2fasta.py -v ${vcf} -f ${fasta} -o ${meta.sample}_${prefix}.fasta $options.args - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) - END_VERSIONS - """ + """ + variants2fasta.py -v ${vcf} -f ${fasta} -o ${meta.sample}_${prefix}.fasta $options.args + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + END_VERSIONS + """ } From c8ee16a05e87608d89e07d9712164a0bea6d1332 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 14:06:35 +0100 Subject: [PATCH 041/227] Update get_software_versions.nf --- modules/local/get_software_versions.nf | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/local/get_software_versions.nf b/modules/local/get_software_versions.nf index 044bf4bd..a239b4e7 100644 --- a/modules/local/get_software_versions.nf +++ b/modules/local/get_software_versions.nf @@ -26,9 +26,9 @@ process GET_SOFTWARE_VERSIONS { path 'software_versions_mqc.yaml', emit: yaml script: // This script is bundled with the pipeline, 
in nf-core/mhcquant/bin/ - """ - echo $workflow.manifest.version > pipeline.version.txt - echo $workflow.nextflow.version > nextflow.version.txt - scrape_software_versions.py &> software_versions_mqc.yaml - """ + """ + echo $workflow.manifest.version > pipeline.version.txt + echo $workflow.nextflow.version > nextflow.version.txt + scrape_software_versions.py &> software_versions_mqc.yaml + """ } From 49eebe52589a6c0f1a362a888b048adb5e3ea65c Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 14:55:35 +0100 Subject: [PATCH 042/227] Update mhcflurry_predictpeptidesclass1.nf --- modules/local/mhcflurry_predictpeptidesclass1.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index 5507de09..8a44fe2b 100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -23,7 +23,7 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { tuple val(meta), path(mztab), val(alleles) output: - tuple val(meta), path("${prefix}.csv") , emit: csv + tuple val(meta), path("*.csv") , emit: csv path "versions.yml" , emit: versions script: From 624b4e2088e911a3976436b215195436f3473350 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 14:58:11 +0100 Subject: [PATCH 043/227] Update mhcflurry_predictpsms.nf --- modules/local/mhcflurry_predictpsms.nf | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/modules/local/mhcflurry_predictpsms.nf b/modules/local/mhcflurry_predictpsms.nf index 42096e89..06772821 100644 --- a/modules/local/mhcflurry_predictpsms.nf +++ b/modules/local/mhcflurry_predictpsms.nf @@ -29,14 +29,14 @@ process MHCFLURRY_PREDICTPSMS { script: def prefix = options.suffix ? 
"${meta.id}_${options.suffix}" : "${meta.id}_peptide_filter" - """ - mhcflurry-downloads --quiet fetch models_class1 - mhcflurry_predict_mztab_for_filtering.py ${params.subset_affinity_threshold} '${allotypes}' ${perc_mztab} ${psm_mztab} ${prefix}.idXML - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) - END_VERSIONS - """ + """ + mhcflurry-downloads --quiet fetch models_class1 + mhcflurry_predict_mztab_for_filtering.py ${params.subset_affinity_threshold} '${allotypes}' ${perc_mztab} ${psm_mztab} ${prefix}.idXML + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + END_VERSIONS + """ } From b5828604b4bbcf6634a20c6bffdaf1c4706ff5ca Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 14:59:13 +0100 Subject: [PATCH 044/227] Update mhcflurry_predictneoepitopesclass1.nf --- modules/local/mhcflurry_predictneoepitopesclass1.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/local/mhcflurry_predictneoepitopesclass1.nf b/modules/local/mhcflurry_predictneoepitopesclass1.nf index 8b4520d5..5a398d0c 100644 --- a/modules/local/mhcflurry_predictneoepitopesclass1.nf +++ b/modules/local/mhcflurry_predictneoepitopesclass1.nf @@ -24,8 +24,8 @@ process MHCFLURRY_PREDICTNEOEPITOPESCLASS1 { tuple val(meta), val(allotypes), path(neoepitopes) output: - tuple val(meta), path("*.csv"), emit: csv - path "versions.yml", emit: versions + tuple val(meta), path("*.csv") , emit: csv + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${neoepitopes}_${meta}_${options.suffix}" : "${neoepitopes}_${meta}_predicted_neoepitopes_class_1" From 1b52f67a5a2993166bbc513c8a3d5f11a05dd339 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:00:09 +0100 Subject: [PATCH 045/227] Update mhcflurry_predictpeptidesclass1.nf --- modules/local/mhcflurry_predictpeptidesclass1.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index 8a44fe2b..193f6710 100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -24,7 +24,7 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { output: tuple val(meta), path("*.csv") , emit: csv - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}_${options.suffix}" : "${meta.id}_predicted_peptides_class_1" From 5860c298df277ef20b5a65e8378e2bc55120c9ee Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:00:48 +0100 Subject: [PATCH 046/227] Update mhcnuggets_neoepitopesclass2post.nf --- modules/local/mhcnuggets_neoepitopesclass2post.nf | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2post.nf b/modules/local/mhcnuggets_neoepitopesclass2post.nf index c570c7a6..9eb6c960 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2post.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2post.nf @@ -24,14 +24,13 @@ process MHCNUGGETS_NEOEPITOPESCLASS2POST { tuple val(meta), path(neoepitopes), path(predicted) output: - tuple val(meta), path("*.csv"), emit: csv - path "versions.yml", emit: versions + tuple val(meta), path("*.csv") , emit: csv + path "versions.yml" , emit: versions script: """ postprocess_neoepitopes_mhcnuggets.py --input ${predicted} 
--neoepitopes ${neoepitopes} - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 217a16972c95376c5ad8d53c6f19543ed8af8f26 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:01:13 +0100 Subject: [PATCH 047/227] Update mhcflurry_predictpsms.nf --- modules/local/mhcflurry_predictpsms.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/local/mhcflurry_predictpsms.nf b/modules/local/mhcflurry_predictpsms.nf index 06772821..07c63635 100644 --- a/modules/local/mhcflurry_predictpsms.nf +++ b/modules/local/mhcflurry_predictpsms.nf @@ -32,7 +32,6 @@ process MHCFLURRY_PREDICTPSMS { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_predict_mztab_for_filtering.py ${params.subset_affinity_threshold} '${allotypes}' ${perc_mztab} ${psm_mztab} ${prefix}.idXML - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 7ab263cfcce2d47e0330ab392e17c531ba9ff26d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:01:45 +0100 Subject: [PATCH 048/227] Update mhcflurry_predictpeptidesclass1.nf --- modules/local/mhcflurry_predictpeptidesclass1.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index 193f6710..eddecd34 100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -32,7 +32,6 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_predict_mztab.py '${alleles}' ${mztab} ${prefix}.csv - cat <<-END_VERSIONS 
> versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From b04ecd02f257b32955a0bddfd7685ad9bad7a7ad Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:02:30 +0100 Subject: [PATCH 049/227] Update mhcflurry_predictneoepitopesclass1.nf --- modules/local/mhcflurry_predictneoepitopesclass1.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/local/mhcflurry_predictneoepitopesclass1.nf b/modules/local/mhcflurry_predictneoepitopesclass1.nf index 5a398d0c..a3547a6c 100644 --- a/modules/local/mhcflurry_predictneoepitopesclass1.nf +++ b/modules/local/mhcflurry_predictneoepitopesclass1.nf @@ -33,7 +33,6 @@ process MHCFLURRY_PREDICTNEOEPITOPESCLASS1 { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_neoepitope_binding_prediction.py '${allotypes}' ${prefix}.csv - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 09429565c2851d7b90c0c00fb74cc564f6d32dc8 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:03:43 +0100 Subject: [PATCH 050/227] Update mhcnuggets_neoepitopesclass2pre.nf --- modules/local/mhcnuggets_neoepitopesclass2pre.nf | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index 0e2cab71..25e92321 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -20,14 +20,13 @@ process MHCNUGGETS_NEOEPITOPESCLASS2RE { output: tuple val(meta), path("*${prefix}*"), emit: preprocessed - path "versions.yml", emit: versions + path "versions.yml" , emit: 
versions script: def prefix = options.suffix ? "${meta}_${options.suffix}" : "${meta}_mhcnuggets_preprocessed" """ preprocess_neoepitopes_mhcnuggets.py --neoepitopes ${neoepitopes} --output ${prefix} - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From b76bffe410b324ac48089d8cd0859740784ddd8e Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:04:21 +0100 Subject: [PATCH 051/227] Update mhcnuggets_peptidesclass2post.nf --- modules/local/mhcnuggets_peptidesclass2post.nf | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/modules/local/mhcnuggets_peptidesclass2post.nf b/modules/local/mhcnuggets_peptidesclass2post.nf index 5f8ebd96..0c991c2a 100644 --- a/modules/local/mhcnuggets_peptidesclass2post.nf +++ b/modules/local/mhcnuggets_peptidesclass2post.nf @@ -23,15 +23,14 @@ process MHCNUGGETS_PEPTIDESCLASS2POST { tuple val(meta), path(peptides), path(peptide_to_geneID) output: - tuple val(meta), path('*.csv'), emit: csv - path "versions.yml", emit: versions + tuple val(meta), path('*.csv') , emit: csv + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}_postprocessed" """ postprocess_peptides_mhcnuggets.py --input ${peptides} --peptides_seq_ID ${peptide_to_geneID} --output ${prefix}.csv - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 4b48df204e90ff751da3cfd87836b838f0afa717 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:05:03 +0100 Subject: [PATCH 052/227] Update modules/local/mhcnuggets_peptidesclass2pre.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- modules/local/mhcnuggets_peptidesclass2pre.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index c60263f2..67a0a3ce 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -19,7 +19,7 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { tuple val(meta), path(mztab) output: - tuple val(meta), path("*_preprocessed_mhcnuggets_peptides"), emit: preprocessed + tuple val(meta), path("*${prefix}"), emit: preprocessed tuple val(meta), path('*peptide_to_geneID*'), emit: geneID path "versions.yml", emit: versions From 5c5e0108c5e7436ef60507742aaf06ac64d0b200 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:06:06 +0100 Subject: [PATCH 053/227] Update mhcnuggets_peptidesclass2pre.nf --- modules/local/mhcnuggets_peptidesclass2pre.nf | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index 67a0a3ce..44aa3c28 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ 
b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -19,7 +19,7 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { tuple val(meta), path(mztab) output: - tuple val(meta), path("*${prefix}"), emit: preprocessed + tuple val(meta), path("*${prefix}*"), emit: preprocessed tuple val(meta), path('*peptide_to_geneID*'), emit: geneID path "versions.yml", emit: versions @@ -28,7 +28,6 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { """ preprocess_peptides_mhcnuggets.py --mztab ${mztab} --output ${prefix} - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From ca567281112116b5b66037934eef1e6d4bc3cc5d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:07:11 +0100 Subject: [PATCH 054/227] Update mhcnuggets_predictneoepitopesclass2.nf --- modules/local/mhcnuggets_predictneoepitopesclass2.nf | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/local/mhcnuggets_predictneoepitopesclass2.nf b/modules/local/mhcnuggets_predictneoepitopesclass2.nf index ef201fb5..02670d65 100644 --- a/modules/local/mhcnuggets_predictneoepitopesclass2.nf +++ b/modules/local/mhcnuggets_predictneoepitopesclass2.nf @@ -19,7 +19,7 @@ process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { tuple val(meta), path(neoepitopes), val(alleles) output: - tuple val(meta), path("*_predicted_neoepitopes_class_2"), emit: csv + tuple val(meta), path("*${prefix}*"), emit: csv path "versions.yml", emit: versions script: @@ -27,7 +27,6 @@ process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { """ mhcnuggets_predict_peptides.py --peptides ${neoepitopes} --alleles '${alleles}' --output ${prefix} - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + 
pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 63c0a6c66ef3cebce78dfb6fb07cf859b8a1fdc6 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:08:12 +0100 Subject: [PATCH 055/227] Update mhcnuggets_predictpeptidesclass2.nf --- modules/local/mhcnuggets_predictpeptidesclass2.nf | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index af8d0fa7..3905667b 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -19,7 +19,7 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { tuple val(meta), path(peptides), val(alleles) output: - tuple val(meta), path("*_predicted_peptides_class_2"), emit: csv + tuple val(meta), path("*${prefix}"), emit: csv path "versions.yml", emit: versions script: @@ -27,7 +27,6 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { """ mhcnuggets_predict_peptides.py --peptides ${peptides} --alleles '${alleles}' --output ${prefix} - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 20832db38cfcea931266a5ab419a6d8d13ff9474 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:09:00 +0100 Subject: [PATCH 056/227] Update mhcnuggets_predictneoepitopesclass2.nf --- modules/local/mhcnuggets_predictneoepitopesclass2.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_predictneoepitopesclass2.nf b/modules/local/mhcnuggets_predictneoepitopesclass2.nf index 02670d65..4bbd8e29 100644 --- a/modules/local/mhcnuggets_predictneoepitopesclass2.nf +++ 
b/modules/local/mhcnuggets_predictneoepitopesclass2.nf @@ -20,7 +20,7 @@ process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { output: tuple val(meta), path("*${prefix}*"), emit: csv - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta}_${options.suffix}" : "${meta}_predicted_neoepitopes_class_2" From 76758631a260cc302d4982befcb6f98f8d8bc728 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:09:33 +0100 Subject: [PATCH 057/227] Update mhcnuggets_peptidesclass2pre.nf --- modules/local/mhcnuggets_peptidesclass2pre.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index 44aa3c28..1b05c9dd 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -19,9 +19,9 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { tuple val(meta), path(mztab) output: - tuple val(meta), path("*${prefix}*"), emit: preprocessed + tuple val(meta), path("*${prefix}*") , emit: preprocessed tuple val(meta), path('*peptide_to_geneID*'), emit: geneID - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}_preprocessed_mhcnuggets_peptides" From cd93c40365d538f03c9ba523f6ed88d7def8f7f6 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:10:17 +0100 Subject: [PATCH 058/227] Update mhcnuggets_predictpeptidesclass2.nf --- modules/local/mhcnuggets_predictpeptidesclass2.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index 3905667b..3939361d 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -19,7 +19,7 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { tuple val(meta), path(peptides), val(alleles) output: - tuple val(meta), path("*${prefix}"), emit: csv + tuple val(meta), path("*${prefix}*"), emit: csv path "versions.yml", emit: versions script: From f4a99bee468b0c3974ae56bf81b06d56574e750a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:11:53 +0100 Subject: [PATCH 059/227] Update modules/local/openms_cometadapter.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- modules/local/openms_cometadapter.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_cometadapter.nf b/modules/local/openms_cometadapter.nf index 9ed1d1af..2cdad059 100644 --- a/modules/local/openms_cometadapter.nf +++ b/modules/local/openms_cometadapter.nf @@ -29,7 +29,7 @@ process OPENMS_COMETADAPTER { """ CometAdapter -in ${mzml} \\ -out ${prefix}.idXML \\ - -database ${fasta} \\ + -database $fasta \\ -threads $task.cpus $options.args cat <<-END_VERSIONS > versions.yml From 9a3c40b57a32a89bbbc45cbadd92abbd209faead Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:13:06 
+0100 Subject: [PATCH 060/227] Update generate_proteins_from_vcf.nf --- modules/local/generate_proteins_from_vcf.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/generate_proteins_from_vcf.nf b/modules/local/generate_proteins_from_vcf.nf index bc90b50c..343afff1 100644 --- a/modules/local/generate_proteins_from_vcf.nf +++ b/modules/local/generate_proteins_from_vcf.nf @@ -33,7 +33,7 @@ process GENERATE_PROTEINS_FROM_VCF { def prefix = options.suffix ? "${fasta.baseName}_${options.suffix}" : "${fasta.baseName}_added_vcf" """ - variants2fasta.py -v ${vcf} -f ${fasta} -o ${meta.sample}_${prefix}.fasta $options.args + variants2fasta.py -v ${vcf} -f $fasta -o $meta.sample_${prefix}.fasta $options.args cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) From a88720d0bd4bbec43a4bfe06e78f41c5aca8cb2a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:13:43 +0100 Subject: [PATCH 061/227] Update mhcflurry_predictneoepitopesclass1.nf --- modules/local/mhcflurry_predictneoepitopesclass1.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcflurry_predictneoepitopesclass1.nf b/modules/local/mhcflurry_predictneoepitopesclass1.nf index a3547a6c..889b9963 100644 --- a/modules/local/mhcflurry_predictneoepitopesclass1.nf +++ b/modules/local/mhcflurry_predictneoepitopesclass1.nf @@ -32,7 +32,7 @@ process MHCFLURRY_PREDICTNEOEPITOPESCLASS1 { """ mhcflurry-downloads --quiet fetch models_class1 - mhcflurry_neoepitope_binding_prediction.py '${allotypes}' ${prefix}.csv + mhcflurry_neoepitope_binding_prediction.py '$allotypes' ${prefix}.csv cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ 
.*\$//') ) From 3828273b1e339e620a13e579ab39575613835c9c Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:15:24 +0100 Subject: [PATCH 062/227] Update mhcflurry_predictpeptidesclass1.nf --- modules/local/mhcflurry_predictpeptidesclass1.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index eddecd34..a61501a0 100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -31,7 +31,7 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { """ mhcflurry-downloads --quiet fetch models_class1 - mhcflurry_predict_mztab.py '${alleles}' ${mztab} ${prefix}.csv + mhcflurry_predict_mztab.py '$alleles' $mztab ${prefix}.csv cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 62fd17fe26c628266b2c39ea9862c2114bfc809f Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:16:56 +0100 Subject: [PATCH 063/227] Update mhcflurry_predictpsms.nf --- modules/local/mhcflurry_predictpsms.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcflurry_predictpsms.nf b/modules/local/mhcflurry_predictpsms.nf index 07c63635..617be7f8 100644 --- a/modules/local/mhcflurry_predictpsms.nf +++ b/modules/local/mhcflurry_predictpsms.nf @@ -31,7 +31,7 @@ process MHCFLURRY_PREDICTPSMS { """ mhcflurry-downloads --quiet fetch models_class1 - mhcflurry_predict_mztab_for_filtering.py ${params.subset_affinity_threshold} '${allotypes}' ${perc_mztab} ${psm_mztab} ${prefix}.idXML + mhcflurry_predict_mztab_for_filtering.py ${params.subset_affinity_threshold} '$allotypes' $perc_mztab 
$psm_mztab ${prefix}.idXML cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 4fde59d439731d28e4c0c6180f2712aa95eaa920 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:17:31 +0100 Subject: [PATCH 064/227] Update mhcnuggets_neoepitopesclass2post.nf --- modules/local/mhcnuggets_neoepitopesclass2post.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2post.nf b/modules/local/mhcnuggets_neoepitopesclass2post.nf index 9eb6c960..8264c770 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2post.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2post.nf @@ -30,7 +30,7 @@ process MHCNUGGETS_NEOEPITOPESCLASS2POST { script: """ - postprocess_neoepitopes_mhcnuggets.py --input ${predicted} --neoepitopes ${neoepitopes} + postprocess_neoepitopes_mhcnuggets.py --input $predicted --neoepitopes $neoepitopes cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 41f31dda8ba076ae7973ae1a86e3d40ac31b60bf Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:18:31 +0100 Subject: [PATCH 065/227] Update nextflow.config Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nextflow.config b/nextflow.config index 99379ce1..d414d6d6 100644 --- a/nextflow.config +++ b/nextflow.config @@ -214,7 +214,7 @@ manifest { description = 'Identify and quantify peptides from mass spectrometry raw data' mainScript = 'main.nf' nextflowVersion = '!>=21.04.0' - version = '2.1.0dev' + version = 
'2.1.0' } // Function to ensure that resource requirements don't go beyond From d3e4a8534819f3ee1e031219b8b44af2362c653a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:20:25 +0100 Subject: [PATCH 066/227] Update openms_idfilter.nf --- modules/local/openms_idfilter.nf | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/modules/local/openms_idfilter.nf b/modules/local/openms_idfilter.nf index 9e3f474c..cbb532e5 100644 --- a/modules/local/openms_idfilter.nf +++ b/modules/local/openms_idfilter.nf @@ -36,11 +36,10 @@ process OPENMS_IDFILTER { } """ - IDFilter -in ${idxml} \\ + IDFilter -in $idxml \\ -out ${prefix}.idXML \\ - -threads ${task.cpus} \\ - $options.args ${whitelist} - + -threads $task.cpus \\ + $options.args $whitelist cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From b19c8dd533b7db24b12cd19ba02c5c7fb6243e27 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:21:12 +0100 Subject: [PATCH 067/227] Update modules/local/openms_featurefinderidentification.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- modules/local/openms_featurefinderidentification.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_featurefinderidentification.nf b/modules/local/openms_featurefinderidentification.nf index f4724f19..39251506 100644 --- a/modules/local/openms_featurefinderidentification.nf +++ b/modules/local/openms_featurefinderidentification.nf @@ -40,7 +40,7 @@ process OPENMS_FEATUREFINDERIDENTIFICATION { FeatureFinderIdentification -in ${mzml} \\ -out ${prefix}.featureXML \\ -threads ${task.cpus} \\ - $arguments + $options.args cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: From 
fe46dac1fc129db13d29daed7a67163eacd8e3b9 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:21:43 +0100 Subject: [PATCH 068/227] Update mhcnuggets_predictpeptidesclass2.nf --- modules/local/mhcnuggets_predictpeptidesclass2.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index 3939361d..52ca02da 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -26,7 +26,7 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { def prefix = options.suffix ? "${meta.sample}_${options.suffix}" : "${meta.sample}_predicted_peptides_class_2" """ - mhcnuggets_predict_peptides.py --peptides ${peptides} --alleles '${alleles}' --output ${prefix} + mhcnuggets_predict_peptides.py --peptides $peptides --alleles '$alleles' --output ${prefix} cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 9445bcb44defc4770f0d2096b3f75e608e64ec73 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:22:14 +0100 Subject: [PATCH 069/227] Update modules/local/openms_cometadapter.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- modules/local/openms_cometadapter.nf | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/local/openms_cometadapter.nf b/modules/local/openms_cometadapter.nf index 2cdad059..4e1f93e6 100644 --- a/modules/local/openms_cometadapter.nf +++ b/modules/local/openms_cometadapter.nf @@ -30,7 +30,8 @@ process OPENMS_COMETADAPTER { CometAdapter -in ${mzml} \\ -out ${prefix}.idXML \\ -database $fasta \\ - -threads $task.cpus 
$options.args + -threads $task.cpus \\ + $options.args cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: From 5bec9c959b7be52b5e564d5fff34a58caaad147c Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:22:38 +0100 Subject: [PATCH 070/227] Update openms_cometadapter.nf --- modules/local/openms_cometadapter.nf | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/local/openms_cometadapter.nf b/modules/local/openms_cometadapter.nf index 4e1f93e6..4111a7a5 100644 --- a/modules/local/openms_cometadapter.nf +++ b/modules/local/openms_cometadapter.nf @@ -27,12 +27,11 @@ process OPENMS_COMETADAPTER { def prefix = options.suffix ? "${mzml.baseName}_${options.suffix}" : "${mzml.baseName}" """ - CometAdapter -in ${mzml} \\ + CometAdapter -in $mzml \\ -out ${prefix}.idXML \\ -database $fasta \\ -threads $task.cpus \\ $options.args - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From a7c909847b80ff7d1c2f14cc620dcbeea083d829 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:23:52 +0100 Subject: [PATCH 071/227] Update generate_proteins_from_vcf.nf --- modules/local/generate_proteins_from_vcf.nf | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/modules/local/generate_proteins_from_vcf.nf b/modules/local/generate_proteins_from_vcf.nf index 343afff1..d1070476 100644 --- a/modules/local/generate_proteins_from_vcf.nf +++ b/modules/local/generate_proteins_from_vcf.nf @@ -33,12 +33,12 @@ process GENERATE_PROTEINS_FROM_VCF { def prefix = options.suffix ? 
"${fasta.baseName}_${options.suffix}" : "${fasta.baseName}_added_vcf" """ - variants2fasta.py -v ${vcf} -f $fasta -o $meta.sample_${prefix}.fasta $options.args - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) - END_VERSIONS + variants2fasta.py -v ${vcf} -f $fasta -o $meta.sample_${prefix}.fasta $options.args + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + END_VERSIONS """ } From 8f2b0e1ef6be48f6fa34622a9957913ea1af303c Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:28:12 +0100 Subject: [PATCH 072/227] Update get_software_versions.nf --- modules/local/get_software_versions.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/local/get_software_versions.nf b/modules/local/get_software_versions.nf index a239b4e7..647ea324 100644 --- a/modules/local/get_software_versions.nf +++ b/modules/local/get_software_versions.nf @@ -27,8 +27,8 @@ process GET_SOFTWARE_VERSIONS { script: // This script is bundled with the pipeline, in nf-core/mhcquant/bin/ """ - echo $workflow.manifest.version > 
pipeline.version.txt - echo $workflow.nextflow.version > nextflow.version.txt - scrape_software_versions.py &> software_versions_mqc.yaml + echo $workflow.manifest.version > pipeline.version.txt + echo $workflow.nextflow.version > nextflow.version.txt + scrape_software_versions.py &> software_versions_mqc.yaml """ } From 1662d12307c72d3c31bb93cf888b00eee91283f1 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:28:46 +0100 Subject: [PATCH 073/227] Update mhcflurry_predictneoepitopesclass1.nf --- modules/local/mhcflurry_predictneoepitopesclass1.nf | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/local/mhcflurry_predictneoepitopesclass1.nf b/modules/local/mhcflurry_predictneoepitopesclass1.nf index 889b9963..c8cc5624 100644 --- a/modules/local/mhcflurry_predictneoepitopesclass1.nf +++ b/modules/local/mhcflurry_predictneoepitopesclass1.nf @@ -31,11 +31,11 @@ process MHCFLURRY_PREDICTNEOEPITOPESCLASS1 { def prefix = options.suffix ? 
"${neoepitopes}_${meta}_${options.suffix}" : "${neoepitopes}_${meta}_predicted_neoepitopes_class_1" """ - mhcflurry-downloads --quiet fetch models_class1 - mhcflurry_neoepitope_binding_prediction.py '$allotypes' ${prefix}.csv - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) - END_VERSIONS + mhcflurry-downloads --quiet fetch models_class1 + mhcflurry_neoepitope_binding_prediction.py '$allotypes' ${prefix}.csv + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + END_VERSIONS """ } From 6544efde7f88b07d3ebc9fb2a7e7d1ea3cbd5e9e Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:29:15 +0100 Subject: [PATCH 074/227] Update mhcflurry_predictpeptidesclass1.nf --- modules/local/mhcflurry_predictpeptidesclass1.nf | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index a61501a0..751a8bf2 100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -30,13 +30,13 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { def prefix = options.suffix ? 
"${meta.id}_${options.suffix}" : "${meta.id}_predicted_peptides_class_1" """ - mhcflurry-downloads --quiet fetch models_class1 - mhcflurry_predict_mztab.py '$alleles' $mztab ${prefix}.csv - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) - fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) - END_VERSIONS + mhcflurry-downloads --quiet fetch models_class1 + mhcflurry_predict_mztab.py '$alleles' $mztab ${prefix}.csv + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) + END_VERSIONS """ } From 12449ec8713a2a159ce5193f680e0b978ab8372f Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:29:49 +0100 Subject: [PATCH 075/227] Update mhcflurry_predictpsms.nf --- modules/local/mhcflurry_predictpsms.nf | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/local/mhcflurry_predictpsms.nf b/modules/local/mhcflurry_predictpsms.nf index 617be7f8..27e6a3cf 100644 --- a/modules/local/mhcflurry_predictpsms.nf +++ b/modules/local/mhcflurry_predictpsms.nf @@ -30,12 +30,12 @@ process MHCFLURRY_PREDICTPSMS { def prefix = options.suffix ? 
"${meta.id}_${options.suffix}" : "${meta.id}_peptide_filter" """ - mhcflurry-downloads --quiet fetch models_class1 - mhcflurry_predict_mztab_for_filtering.py ${params.subset_affinity_threshold} '$allotypes' $perc_mztab $psm_mztab ${prefix}.idXML - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) - END_VERSIONS + mhcflurry-downloads --quiet fetch models_class1 + mhcflurry_predict_mztab_for_filtering.py ${params.subset_affinity_threshold} '$allotypes' $perc_mztab $psm_mztab ${prefix}.idXML + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + END_VERSIONS """ } From 2890425ca1e6224507b8a7e6b43c8c1ff7f059a9 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:31:14 +0100 Subject: [PATCH 076/227] Update mhcnuggets_neoepitopesclass2post.nf --- modules/local/mhcnuggets_neoepitopesclass2post.nf | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2post.nf b/modules/local/mhcnuggets_neoepitopesclass2post.nf index 8264c770..77d135d4 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2post.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2post.nf @@ -30,10 +30,10 @@ process MHCNUGGETS_NEOEPITOPESCLASS2POST { script: """ - postprocess_neoepitopes_mhcnuggets.py --input $predicted --neoepitopes $neoepitopes - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - END_VERSIONS + postprocess_neoepitopes_mhcnuggets.py --input $predicted --neoepitopes $neoepitopes + cat <<-END_VERSIONS > versions.yml + 
${getProcessName(task.process)}: + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + END_VERSIONS """ } From a889ff84c15b44d65b46fe7d14058c91df74f514 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:31:40 +0100 Subject: [PATCH 077/227] Update mhcnuggets_neoepitopesclass2pre.nf --- modules/local/mhcnuggets_neoepitopesclass2pre.nf | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index 25e92321..71172309 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -26,11 +26,11 @@ process MHCNUGGETS_NEOEPITOPESCLASS2RE { def prefix = options.suffix ? "${meta}_${options.suffix}" : "${meta}_mhcnuggets_preprocessed" """ - preprocess_neoepitopes_mhcnuggets.py --neoepitopes ${neoepitopes} --output ${prefix} - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - END_VERSIONS + preprocess_neoepitopes_mhcnuggets.py --neoepitopes ${neoepitopes} --output ${prefix} + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + END_VERSIONS """ } From 01f77540c2ef08f5ccc614d4a6ffafd6f14ca02c Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:32:05 +0100 Subject: [PATCH 078/227] Update mhcnuggets_peptidesclass2post.nf --- 
modules/local/mhcnuggets_peptidesclass2post.nf | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/local/mhcnuggets_peptidesclass2post.nf b/modules/local/mhcnuggets_peptidesclass2post.nf index 0c991c2a..dec01ef7 100644 --- a/modules/local/mhcnuggets_peptidesclass2post.nf +++ b/modules/local/mhcnuggets_peptidesclass2post.nf @@ -30,10 +30,10 @@ process MHCNUGGETS_PEPTIDESCLASS2POST { def prefix = options.suffix ? "${meta.sample}_${options.suffix}" : "${meta.sample}_postprocessed" """ - postprocess_peptides_mhcnuggets.py --input ${peptides} --peptides_seq_ID ${peptide_to_geneID} --output ${prefix}.csv - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - END_VERSIONS + postprocess_peptides_mhcnuggets.py --input ${peptides} --peptides_seq_ID ${peptide_to_geneID} --output ${prefix}.csv + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + END_VERSIONS """ } From dfb4a74d6f307368a20321a88c63a70ff442743a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:33:05 +0100 Subject: [PATCH 079/227] Update mhcnuggets_neoepitopesclass2pre.nf --- modules/local/mhcnuggets_neoepitopesclass2pre.nf | 2 -- 1 file changed, 2 deletions(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index 71172309..6f4c97b1 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -33,5 +33,3 @@ process MHCNUGGETS_NEOEPITOPESCLASS2RE { END_VERSIONS """ } - -// ${getSoftwareName(task.process)}: 
\$(echo $VERSION) From bdcf9dfbc6587c840710760c0a4400b8c1f9a777 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:34:01 +0100 Subject: [PATCH 080/227] Update mhcnuggets_peptidesclass2pre.nf --- modules/local/mhcnuggets_peptidesclass2pre.nf | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index 1b05c9dd..2cd5732a 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -27,10 +27,10 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { def prefix = options.suffix ? "${meta.sample}_${options.suffix}" : "${meta.sample}_preprocessed_mhcnuggets_peptides" """ - preprocess_peptides_mhcnuggets.py --mztab ${mztab} --output ${prefix} - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - END_VERSIONS + preprocess_peptides_mhcnuggets.py --mztab ${mztab} --output ${prefix} + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + END_VERSIONS """ } From 4f8fffaa7a52870cc9ab33f4c0cc4ca050acd9bb Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:34:29 +0100 Subject: [PATCH 081/227] Update mhcnuggets_predictneoepitopesclass2.nf --- modules/local/mhcnuggets_predictneoepitopesclass2.nf | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/local/mhcnuggets_predictneoepitopesclass2.nf b/modules/local/mhcnuggets_predictneoepitopesclass2.nf index 4bbd8e29..9a04fc97 
100644 --- a/modules/local/mhcnuggets_predictneoepitopesclass2.nf +++ b/modules/local/mhcnuggets_predictneoepitopesclass2.nf @@ -26,10 +26,10 @@ process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { def prefix = options.suffix ? "${meta}_${options.suffix}" : "${meta}_predicted_neoepitopes_class_2" """ - mhcnuggets_predict_peptides.py --peptides ${neoepitopes} --alleles '${alleles}' --output ${prefix} - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - END_VERSIONS + mhcnuggets_predict_peptides.py --peptides ${neoepitopes} --alleles '${alleles}' --output ${prefix} + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + END_VERSIONS """ } From 13de6a8148139b006e4df62ce1f8172ca50390bc Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:35:11 +0100 Subject: [PATCH 082/227] Update mhcnuggets_predictpeptidesclass2.nf --- modules/local/mhcnuggets_predictpeptidesclass2.nf | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index 52ca02da..5d34afd1 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -20,17 +20,17 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { output: tuple val(meta), path("*${prefix}*"), emit: csv - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}_predicted_peptides_class_2" """ - mhcnuggets_predict_peptides.py --peptides $peptides --alleles '$alleles' --output ${prefix} - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) - END_VERSIONS + mhcnuggets_predict_peptides.py --peptides $peptides --alleles '$alleles' --output ${prefix} + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) + END_VERSIONS """ } From 834ef358202f8989967f9d8098d03970504f5c15 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:35:42 +0100 Subject: [PATCH 083/227] Update openms_cometadapter.nf --- modules/local/openms_cometadapter.nf | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/modules/local/openms_cometadapter.nf b/modules/local/openms_cometadapter.nf index 4111a7a5..b9c0d0d2 100644 --- a/modules/local/openms_cometadapter.nf +++ b/modules/local/openms_cometadapter.nf @@ -20,21 +20,21 @@ process OPENMS_COMETADAPTER { output: tuple val(meta), path("*.idXML"), emit: idxml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${mzml.baseName}_${options.suffix}" : "${mzml.baseName}" """ - CometAdapter -in $mzml \\ - -out ${prefix}.idXML \\ - -database $fasta \\ - -threads $task.cpus \\ - $options.args - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + CometAdapter -in $mzml \\ + -out ${prefix}.idXML \\ + -database $fasta \\ + -threads $task.cpus \\ + $options.args + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From f62f9ba7ff872fbead2e726a5a6104261788b819 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:37:28 +0100 Subject: [PATCH 084/227] Update openms_decoydatabase.nf --- modules/local/openms_decoydatabase.nf | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/modules/local/openms_decoydatabase.nf b/modules/local/openms_decoydatabase.nf index 8aee9931..3103a4f8 100644 --- a/modules/local/openms_decoydatabase.nf +++ b/modules/local/openms_decoydatabase.nf @@ -19,19 +19,18 @@ process OPENMS_DECOYDATABASE { tuple val(meta), path(fasta) output: - tuple val(meta), path("*_decoy.fasta"), emit: decoy - path "versions.yml", emit: versions + tuple val(meta), path("*.fasta") , emit: decoy + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${fasta.baseName}_${options.suffix}" : "${fasta.baseName}_decoy" """ - DecoyDatabase -in ${fasta} \\ + DecoyDatabase -in $fasta} \\ -out ${prefix}.fasta \\ -decoy_string DECOY_ \\ -decoy_string_position prefix - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 05b00943ab9fab5b73c910f8f041035d70db3a9b Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:38:08 +0100 Subject: [PATCH 085/227] Update openms_falsediscoveryrate.nf --- modules/local/openms_falsediscoveryrate.nf | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/modules/local/openms_falsediscoveryrate.nf b/modules/local/openms_falsediscoveryrate.nf index 8925c634..4c9d1116 100644 --- a/modules/local/openms_falsediscoveryrate.nf +++ b/modules/local/openms_falsediscoveryrate.nf @@ -20,21 +20,20 @@ process OPENMS_FALSEDISCOVERYRATE { output: tuple val(meta), path("*.idXML"), emit: idxml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${idxml.baseName}_${options.suffix}" : "${idxml.baseName}_fdr" """ - FalseDiscoveryRate -in ${idxml} \\ - -protein 'false' \\ - -out ${prefix}.idXML \\ - -threads ${task.cpus} - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + FalseDiscoveryRate -in $idxml \\ + -protein 'false' \\ + -out ${prefix}.idXML \\ + -threads $task.cpus + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From 796e2697aec4b3edd5a3ae521cd98f3f4bf34d41 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:38:57 +0100 Subject: [PATCH 086/227] Update generate_proteins_from_vcf.nf --- modules/local/generate_proteins_from_vcf.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/generate_proteins_from_vcf.nf b/modules/local/generate_proteins_from_vcf.nf index d1070476..99ffa3ee 100644 --- a/modules/local/generate_proteins_from_vcf.nf +++ b/modules/local/generate_proteins_from_vcf.nf @@ -33,7 +33,7 @@ process GENERATE_PROTEINS_FROM_VCF { def prefix = options.suffix ? 
"${fasta.baseName}_${options.suffix}" : "${fasta.baseName}_added_vcf" """ - variants2fasta.py -v ${vcf} -f $fasta -o $meta.sample_${prefix}.fasta $options.args + variants2fasta.py -v $vcf -f $fasta -o $meta.sample_${prefix}.fasta $options.args cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) From 329b25f61613eb15877fa4b10b5c6bb60bbc16bd Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:39:20 +0100 Subject: [PATCH 087/227] Update generate_proteins_from_vcf.nf --- modules/local/generate_proteins_from_vcf.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/generate_proteins_from_vcf.nf b/modules/local/generate_proteins_from_vcf.nf index 99ffa3ee..01568e0e 100644 --- a/modules/local/generate_proteins_from_vcf.nf +++ b/modules/local/generate_proteins_from_vcf.nf @@ -26,7 +26,7 @@ process GENERATE_PROTEINS_FROM_VCF { tuple val(meta), path(fasta), path(vcf) output: - tuple val(meta), path("*_vcf.fasta"), emit: vcf_fasta + tuple val(meta), path("*.fasta"), emit: vcf_fasta path "*.version.txt", emit: version script: From 4030d730d24dddd3fe706a30f1ad6657db521f64 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:40:24 +0100 Subject: [PATCH 088/227] Update mhcflurry_predictpsms.nf --- modules/local/mhcflurry_predictpsms.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcflurry_predictpsms.nf b/modules/local/mhcflurry_predictpsms.nf index 27e6a3cf..5d8beedc 100644 --- a/modules/local/mhcflurry_predictpsms.nf +++ b/modules/local/mhcflurry_predictpsms.nf @@ -24,7 +24,7 @@ process MHCFLURRY_PREDICTPSMS { output: tuple val(meta), path("*.idXML"), emit: idxml - path "versions.yml", emit: versions + 
path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}_${options.suffix}" : "${meta.id}_peptide_filter" From 1cdeb421e4c4e4d7eed9815af602e553393b77ec Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:41:27 +0100 Subject: [PATCH 089/227] Update generate_proteins_from_vcf.nf --- modules/local/generate_proteins_from_vcf.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/generate_proteins_from_vcf.nf b/modules/local/generate_proteins_from_vcf.nf index 01568e0e..f00f57dd 100644 --- a/modules/local/generate_proteins_from_vcf.nf +++ b/modules/local/generate_proteins_from_vcf.nf @@ -27,7 +27,7 @@ process GENERATE_PROTEINS_FROM_VCF { output: tuple val(meta), path("*.fasta"), emit: vcf_fasta - path "*.version.txt", emit: version + path "*.version.txt" , emit: version script: def prefix = options.suffix ? "${fasta.baseName}_${options.suffix}" : "${fasta.baseName}_added_vcf" From a815036549ec526209087e6d1cc42b076e3d5cf0 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:43:26 +0100 Subject: [PATCH 090/227] Update mhcnuggets_neoepitopesclass2pre.nf --- modules/local/mhcnuggets_neoepitopesclass2pre.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index 6f4c97b1..e0e0ecd0 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -26,7 +26,7 @@ process MHCNUGGETS_NEOEPITOPESCLASS2RE { def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_mhcnuggets_preprocessed" """ - preprocess_neoepitopes_mhcnuggets.py --neoepitopes ${neoepitopes} --output ${prefix} + preprocess_neoepitopes_mhcnuggets.py --neoepitopes $neoepitopes --output ${prefix} cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 1c90c4dcac6c59d6e8a14b835e233e790fb22a2f Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:43:56 +0100 Subject: [PATCH 091/227] Update mhcnuggets_peptidesclass2post.nf --- modules/local/mhcnuggets_peptidesclass2post.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_peptidesclass2post.nf b/modules/local/mhcnuggets_peptidesclass2post.nf index dec01ef7..8f24400e 100644 --- a/modules/local/mhcnuggets_peptidesclass2post.nf +++ b/modules/local/mhcnuggets_peptidesclass2post.nf @@ -30,7 +30,7 @@ process MHCNUGGETS_PEPTIDESCLASS2POST { def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}_postprocessed" """ - postprocess_peptides_mhcnuggets.py --input ${peptides} --peptides_seq_ID ${peptide_to_geneID} --output ${prefix}.csv + postprocess_peptides_mhcnuggets.py --input $peptides --peptides_seq_ID $peptide_to_geneID --output ${prefix}.csv cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 9636cae52d8e17be338807fc3635493ca511d9ee Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:44:24 +0100 Subject: [PATCH 092/227] Update mhcnuggets_peptidesclass2pre.nf --- modules/local/mhcnuggets_peptidesclass2pre.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index 2cd5732a..1826b933 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -27,7 +27,7 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}_preprocessed_mhcnuggets_peptides" """ - preprocess_peptides_mhcnuggets.py --mztab ${mztab} --output ${prefix} + preprocess_peptides_mhcnuggets.py --mztab $mztab --output ${prefix} cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 50252676512ccaace4b01e50bad59dba91eecee7 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:45:57 +0100 Subject: [PATCH 093/227] Update mhcnuggets_predictneoepitopesclass2.nf --- modules/local/mhcnuggets_predictneoepitopesclass2.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_predictneoepitopesclass2.nf b/modules/local/mhcnuggets_predictneoepitopesclass2.nf index 9a04fc97..2fb2be84 100644 --- a/modules/local/mhcnuggets_predictneoepitopesclass2.nf +++ b/modules/local/mhcnuggets_predictneoepitopesclass2.nf @@ -26,7 +26,7 @@ process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_predicted_neoepitopes_class_2" """ - mhcnuggets_predict_peptides.py --peptides ${neoepitopes} --alleles '${alleles}' --output ${prefix} + mhcnuggets_predict_peptides.py --peptides $neoepitopes --alleles '$alleles' --output ${prefix} cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From ce31821c20b019d9ac6503d68f6bf833f700462f Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:46:52 +0100 Subject: [PATCH 094/227] Update openms_decoydatabase.nf --- modules/local/openms_decoydatabase.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/local/openms_decoydatabase.nf b/modules/local/openms_decoydatabase.nf index 3103a4f8..25355aad 100644 --- a/modules/local/openms_decoydatabase.nf +++ b/modules/local/openms_decoydatabase.nf @@ -20,14 +20,14 @@ process OPENMS_DECOYDATABASE { output: tuple val(meta), path("*.fasta") , emit: decoy - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${fasta.baseName}_${options.suffix}" : "${fasta.baseName}_decoy" """ - DecoyDatabase -in $fasta} \\ + DecoyDatabase -in $fasta \\ -out ${prefix}.fasta \\ -decoy_string DECOY_ \\ -decoy_string_position prefix From dd064d0ab0d93e28b42f24e1bdbc863222618c10 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:47:43 +0100 Subject: [PATCH 095/227] Update openms_featurefinderidentification.nf --- .../openms_featurefinderidentification.nf | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/modules/local/openms_featurefinderidentification.nf b/modules/local/openms_featurefinderidentification.nf index 39251506..a249fbf9 100644 --- a/modules/local/openms_featurefinderidentification.nf +++ b/modules/local/openms_featurefinderidentification.nf @@ -24,7 +24,7 @@ process OPENMS_FEATUREFINDERIDENTIFICATION { output: tuple val(meta), path("*.featureXML"), emit: featurexml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) @@ -37,14 +37,14 @@ process OPENMS_FEATUREFINDERIDENTIFICATION { } """ - FeatureFinderIdentification -in ${mzml} \\ - -out ${prefix}.featureXML \\ - -threads ${task.cpus} \\ - $options.args - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + FeatureFinderIdentification -in ${mzml} \\ + -out ${prefix}.featureXML \\ + -threads ${task.cpus} \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From 161e016d770cda42205ea30796d0367ea45e3197 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:50:23 
+0100 Subject: [PATCH 096/227] Update openms_featurelinkerunlabeledkd.nf --- modules/local/openms_featurelinkerunlabeledkd.nf | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/modules/local/openms_featurelinkerunlabeledkd.nf b/modules/local/openms_featurelinkerunlabeledkd.nf index cf1122b5..bc0a62f5 100644 --- a/modules/local/openms_featurelinkerunlabeledkd.nf +++ b/modules/local/openms_featurelinkerunlabeledkd.nf @@ -31,13 +31,12 @@ process OPENMS_FEATURELINKERUNLABELEDKD { def prefix = options.suffix ? "${meta.id}_${options.suffix}" : "${meta.id}_all_features_merged" """ - FeatureLinkerUnlabeledKD -in ${features} \\ - -out '${prefix}.consensusXML' \\ - -threads ${task.cpus} - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + FeatureLinkerUnlabeledKD -in $features \\ + -out '${prefix}.consensusXML' \\ + -threads $task.cpus + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From e59379355517d2034f6ee60a8e58abbbc5b5a691 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:51:20 +0100 Subject: [PATCH 097/227] Update openms_idconflictresolver.nf --- modules/local/openms_idconflictresolver.nf | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/modules/local/openms_idconflictresolver.nf b/modules/local/openms_idconflictresolver.nf index 34f5ace0..3fa65e50 100644 --- a/modules/local/openms_idconflictresolver.nf +++ b/modules/local/openms_idconflictresolver.nf @@ -20,20 +20,19 @@ process OPENMS_IDCONFLICTRESOLVER { output: tuple val(meta), path("*.consensusXML"), emit: consensusxml - path "versions.yml", emit: versions + 
path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? "${meta.id}_${options.suffix}" : "${meta.id}_resolved" """ - IDConflictResolver -in ${consensus} \\ - -out ${prefix}.consensusXML \\ - -threads ${task.cpus} - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + IDConflictResolver -in ${consensus} \\ + -out ${prefix}.consensusXML \\ + -threads ${task.cpus} + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From be664d26c85935ee6a0fb381b239240d0cb0cdc4 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:52:55 +0100 Subject: [PATCH 098/227] Update openms_idfilter.nf --- modules/local/openms_idfilter.nf | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/modules/local/openms_idfilter.nf b/modules/local/openms_idfilter.nf index cbb532e5..75a6d559 100644 --- a/modules/local/openms_idfilter.nf +++ b/modules/local/openms_idfilter.nf @@ -36,13 +36,14 @@ process OPENMS_IDFILTER { } """ - IDFilter -in $idxml \\ - -out ${prefix}.idXML \\ - -threads $task.cpus \\ - $options.args $whitelist - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + IDFilter -in $idxml \\ + -out ${prefix}.idXML \\ + -threads $task.cpus \\ + $options.args \\ + $whitelist + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From 
cf272c50baab4d3f15f7e10afe4316c5a04d38e9 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:53:15 +0100 Subject: [PATCH 099/227] Update openms_idfilter.nf --- modules/local/openms_idfilter.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_idfilter.nf b/modules/local/openms_idfilter.nf index 75a6d559..9311623b 100644 --- a/modules/local/openms_idfilter.nf +++ b/modules/local/openms_idfilter.nf @@ -24,7 +24,7 @@ process OPENMS_IDFILTER { output: tuple val(meta), path("*.idXML"), emit: idxml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) From 81c88235bf8b0094bcc7093f9aab6a2ffc617f7f Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:53:43 +0100 Subject: [PATCH 100/227] Update openms_idmerger.nf --- modules/local/openms_idmerger.nf | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/modules/local/openms_idmerger.nf b/modules/local/openms_idmerger.nf index 6877f9d8..424c9e2b 100644 --- a/modules/local/openms_idmerger.nf +++ b/modules/local/openms_idmerger.nf @@ -20,22 +20,22 @@ process OPENMS_IDMERGER { output: tuple val(meta), path("*.idXML"), emit: idxml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${aligned.baseName}_${options.suffix}" : "${meta.sample}_${meta.condition}_all_ids_merged" """ - IDMerger -in $aligned \\ - -out ${prefix}.idXML \\ - -threads ${task.cpus} \\ - -annotate_file_origin \\ - -merge_proteins_add_PSMs - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + IDMerger -in $aligned \\ + -out ${prefix}.idXML \\ + -threads ${task.cpus} \\ + -annotate_file_origin \\ + -merge_proteins_add_PSMs + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From ed97e75b8dbabe112b29ead40cac5307f0ee8d94 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:54:19 +0100 Subject: [PATCH 101/227] Update openms_mapaligneridentification.nf --- .../local/openms_mapaligneridentification.nf | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/modules/local/openms_mapaligneridentification.nf b/modules/local/openms_mapaligneridentification.nf index 0e429768..80ac9ba3 100644 --- a/modules/local/openms_mapaligneridentification.nf +++ b/modules/local/openms_mapaligneridentification.nf @@ -20,20 +20,20 @@ process OPENMS_MAPALIGNERIDENTIFICATION { output: tuple val(meta), path("*.trafoXML"), emit: trafoxml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def out_names = idxml.collect { it.baseName+'.trafoXML' }.join(' ') """ - MapAlignerIdentification -in ${idxml} \\ - -trafo_out ${out_names} \\ - $options.args - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS 
+ MapAlignerIdentification -in $idxml \\ + -trafo_out ${out_names} \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From 5eb69a0d4c846c85ac2e54ab99d5e619ed281fbf Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:58:14 +0100 Subject: [PATCH 102/227] Update openms_featurefinderidentification.nf --- modules/local/openms_featurefinderidentification.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/local/openms_featurefinderidentification.nf b/modules/local/openms_featurefinderidentification.nf index a249fbf9..76da91bb 100644 --- a/modules/local/openms_featurefinderidentification.nf +++ b/modules/local/openms_featurefinderidentification.nf @@ -41,7 +41,6 @@ process OPENMS_FEATUREFINDERIDENTIFICATION { -out ${prefix}.featureXML \\ -threads ${task.cpus} \\ $options.args - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 1caa52b15aa25e5be350a2690aa15b7091f7ce2a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:58:41 +0100 Subject: [PATCH 103/227] Update openms_featurelinkerunlabeledkd.nf --- modules/local/openms_featurelinkerunlabeledkd.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_featurelinkerunlabeledkd.nf b/modules/local/openms_featurelinkerunlabeledkd.nf index bc0a62f5..c123a743 100644 --- a/modules/local/openms_featurelinkerunlabeledkd.nf +++ b/modules/local/openms_featurelinkerunlabeledkd.nf @@ -24,7 +24,7 @@ process OPENMS_FEATURELINKERUNLABELEDKD { output: tuple val(meta), path("*.consensusXML"), emit: consensusxml - path "versions.yml", emit: versions + path 
"versions.yml" , emit: versions script: def software = getSoftwareName(task.process) From 385638f5ffb4d69a575da68f2f887465aa7d38da Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:59:15 +0100 Subject: [PATCH 104/227] Update openms_idconflictresolver.nf --- modules/local/openms_idconflictresolver.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/local/openms_idconflictresolver.nf b/modules/local/openms_idconflictresolver.nf index 3fa65e50..f454cf6a 100644 --- a/modules/local/openms_idconflictresolver.nf +++ b/modules/local/openms_idconflictresolver.nf @@ -27,9 +27,9 @@ process OPENMS_IDCONFLICTRESOLVER { def prefix = options.suffix ? "${meta.id}_${options.suffix}" : "${meta.id}_resolved" """ - IDConflictResolver -in ${consensus} \\ + IDConflictResolver -in $consensus \\ -out ${prefix}.consensusXML \\ - -threads ${task.cpus} + -threads $task.cpus cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 7148a5b862ece8c2fe731244b0237d9d0169393a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 15:59:46 +0100 Subject: [PATCH 105/227] Update openms_idmerger.nf --- modules/local/openms_idmerger.nf | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/local/openms_idmerger.nf b/modules/local/openms_idmerger.nf index 424c9e2b..674a3c44 100644 --- a/modules/local/openms_idmerger.nf +++ b/modules/local/openms_idmerger.nf @@ -29,10 +29,9 @@ process OPENMS_IDMERGER { """ IDMerger -in $aligned \\ -out ${prefix}.idXML \\ - -threads ${task.cpus} \\ + -threads $task.cpus \\ -annotate_file_origin \\ -merge_proteins_add_PSMs - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; 
s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 280aa088f8cffa35be5c05dbd2d67300fbd74d8a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:00:25 +0100 Subject: [PATCH 106/227] Update openms_mapaligneridentification.nf --- modules/local/openms_mapaligneridentification.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/local/openms_mapaligneridentification.nf b/modules/local/openms_mapaligneridentification.nf index 80ac9ba3..7fa67a02 100644 --- a/modules/local/openms_mapaligneridentification.nf +++ b/modules/local/openms_mapaligneridentification.nf @@ -30,7 +30,6 @@ process OPENMS_MAPALIGNERIDENTIFICATION { MapAlignerIdentification -in $idxml \\ -trafo_out ${out_names} \\ $options.args - cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From ae61854e2f7f0827fa2656b395a479402d8c5a5b Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:01:05 +0100 Subject: [PATCH 107/227] Update openms_maprttransformer.nf --- modules/local/openms_maprttransformer.nf | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/modules/local/openms_maprttransformer.nf b/modules/local/openms_maprttransformer.nf index bf9786f2..9490e7a1 100644 --- a/modules/local/openms_maprttransformer.nf +++ b/modules/local/openms_maprttransformer.nf @@ -20,21 +20,20 @@ process OPENMS_MAPRTTRANSFORMER { output: tuple val(meta), path("*_aligned.*"), emit: aligned - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def fileExt = alignment_file.collect { it.name.tokenize("\\.")[1] }.join(' ') """ - MapRTTransformer -in ${alignment_file} \\ - -trafo_in ${trafoxml} \\ - -out ${meta.id}_aligned.${fileExt} \\ - -threads $task.cpus - 
- cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + MapRTTransformer -in $alignment_file \\ + -trafo_in $trafoxml \\ + -out $meta.id_aligned.${fileExt} \\ + -threads $task.cpus + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From 2d09ea5734db1f0573b7e5796204921d82a229fe Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:01:54 +0100 Subject: [PATCH 108/227] Update openms_mztabexporter.nf --- modules/local/openms_mztabexporter.nf | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/modules/local/openms_mztabexporter.nf b/modules/local/openms_mztabexporter.nf index 3162f862..afa24f27 100644 --- a/modules/local/openms_mztabexporter.nf +++ b/modules/local/openms_mztabexporter.nf @@ -31,13 +31,12 @@ process OPENMS_MZTABEXPORTER { def prefix = options.suffix ? 
"${meta.sample}_${meta.condition}_${options.suffix}" : "${meta.sample}_${meta.condition}" """ - MzTabExporter -in ${mztab} \\ - -out ${prefix}.mzTab \\ - -threads ${task.cpus} - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + MzTabExporter -in $mztab \\ + -out ${prefix}.mzTab \\ + -threads $task.cpus + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From 05c7d3f90941964aed665736f5ceccf4a86c194d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:02:19 +0100 Subject: [PATCH 109/227] Update openms_mztabexporter.nf --- modules/local/openms_mztabexporter.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_mztabexporter.nf b/modules/local/openms_mztabexporter.nf index afa24f27..b4fde477 100644 --- a/modules/local/openms_mztabexporter.nf +++ b/modules/local/openms_mztabexporter.nf @@ -24,7 +24,7 @@ process OPENMS_MZTABEXPORTER { output: tuple val(meta), path("*.mzTab"), emit: mztab - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) From 88c43ea58b1eaaa7862a7f03ffa4490d01d19941 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:02:48 +0100 Subject: [PATCH 110/227] Update openms_peakpickerhires.nf --- modules/local/openms_peakpickerhires.nf | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/modules/local/openms_peakpickerhires.nf b/modules/local/openms_peakpickerhires.nf index b9bcb772..05f27873 100644 --- a/modules/local/openms_peakpickerhires.nf +++ 
b/modules/local/openms_peakpickerhires.nf @@ -20,20 +20,19 @@ process OPENMS_PEAKPICKERHIRES { output: tuple val(meta), path("*.mzML"), emit: mzml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? "${mzml.baseName}_${options.suffix}" : "${mzml.baseName}" """ - PeakPickerHiRes -in ${mzml} \\ - -out ${prefix}.mzML \\ - -algorithm:ms_levels ${params.pick_ms_levels} - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + PeakPickerHiRes -in $mzml \\ + -out ${prefix}.mzML \\ + -algorithm:ms_levels ${params.pick_ms_levels} + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From 54e525f53b2ad77f6f03be67fa78b54fa434879e Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:03:30 +0100 Subject: [PATCH 111/227] Update openms_peptideindexer.nf --- modules/local/openms_peptideindexer.nf | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/modules/local/openms_peptideindexer.nf b/modules/local/openms_peptideindexer.nf index 1fee3360..d254102a 100644 --- a/modules/local/openms_peptideindexer.nf +++ b/modules/local/openms_peptideindexer.nf @@ -20,23 +20,22 @@ process OPENMS_PEPTIDEINDEXER { output: tuple val(meta), path("*.idXML"), emit: idxml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${idxml.baseName}_${options.suffix}" : "${idxml.baseName}_idx" """ - PeptideIndexer -in ${idxml} \\ - -out ${prefix}.idXML \\ - -threads ${task.cpus} \\ - -fasta ${fasta} \\ - -decoy_string DECOY \\ - -enzyme:specificity none - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + PeptideIndexer -in $idxml \\ + -out ${prefix}.idXML \\ + -threads $task.cpus \\ + -fasta $fasta \\ + -decoy_string DECOY \\ + -enzyme:specificity none + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From a46f494db6cab4b1c88413c09ab716f3622267db Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:04:05 +0100 Subject: [PATCH 112/227] Update openms_percolatoradapter.nf --- modules/local/openms_percolatoradapter.nf | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/modules/local/openms_percolatoradapter.nf b/modules/local/openms_percolatoradapter.nf index c0b137bb..dd4e374b 100644 --- a/modules/local/openms_percolatoradapter.nf +++ b/modules/local/openms_percolatoradapter.nf @@ -24,21 +24,20 @@ process OPENMS_PERCOLATORADAPTER { output: tuple val(meta), path("*.idXML"), emit: idxml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}_${options.suffix}" : "${meta.id}" """ - OMP_NUM_THREADS=${task.cpus} \\ - PercolatorAdapter -in ${psm} \\ - -out ${prefix}.idXML \\ - $options.args - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + OMP_NUM_THREADS=$task.cpus \\ + PercolatorAdapter -in $psm \\ + -out ${prefix}.idXML \\ + $options.args + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From 4e02df629f99bd8f4229264c04a4220a5ece87dc Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:04:48 +0100 Subject: [PATCH 113/227] Update openms_psmfeatureextractor.nf --- modules/local/openms_psmfeatureextractor.nf | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/modules/local/openms_psmfeatureextractor.nf b/modules/local/openms_psmfeatureextractor.nf index af04bd4a..53dd55a1 100644 --- a/modules/local/openms_psmfeatureextractor.nf +++ b/modules/local/openms_psmfeatureextractor.nf @@ -24,20 +24,19 @@ process OPENMS_PSMFEATUREEXTRACTOR { output: tuple val(meta), path("*.idXML"), emit: idxml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${merged.baseName}_${options.suffix}" : "${merged.baseName}_psm" """ - PSMFeatureExtractor -in ${merged} \\ - -out ${prefix}.idXML \\ - -threads ${task.cpus} - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + PSMFeatureExtractor -in $merged \\ + -out ${prefix}.idXML \\ + -threads $task.cpus + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From 8c698eb60c430c49442bba6d1c4b3a45103e36cb Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:05:34 +0100 Subject: [PATCH 114/227] Update openms_rtmodel.nf --- modules/local/openms_rtmodel.nf | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/modules/local/openms_rtmodel.nf b/modules/local/openms_rtmodel.nf index 8626cf1f..45c702b9 100644 --- a/modules/local/openms_rtmodel.nf +++ b/modules/local/openms_rtmodel.nf @@ -20,22 +20,21 @@ process OPENMS_RTMODEL { output: tuple val(meta), path("*_rt_training.txt"), path("*.paramXML"), path("*_trainset.txt"), emit: complete - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}" """ - RTModel -in ${rt_training} \\ - -cv:skip_cv \\ - -out ${prefix}_rt_training.txt \\ - -out_oligo_params ${prefix}_params.paramXML \\ - -out_oligo_trainset ${prefix}_trainset.txt - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + RTModel -in $rt_training \\ + -cv:skip_cv \\ + -out ${prefix}_rt_training.txt \\ + -out_oligo_params ${prefix}_params.paramXML \\ + -out_oligo_trainset ${prefix}_trainset.txt + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From d2134cb0fa1134dbbf304d1aaa627f2c65f3d4bf Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:06:11 +0100 Subject: [PATCH 115/227] Update openms_rtpredict.nf --- modules/local/openms_rtpredict.nf | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/modules/local/openms_rtpredict.nf b/modules/local/openms_rtpredict.nf index 40ea2ae0..1c82463d 100644 --- a/modules/local/openms_rtpredict.nf +++ b/modules/local/openms_rtpredict.nf @@ -24,22 +24,21 @@ process OPENMS_RTPREDICT { output: tuple val(meta), path("*.csv"), emit: csv - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}_RTpredicted" """ - RTPredict -in_id ${idxml} \\ - -svm_model ${rt_model} \\ - -in_oligo_params ${rt_params} \\ - -in_oligo_trainset ${trainset} \\ - -out_text:file ${prefix}.csv - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + RTPredict -in_id $idxml \\ + -svm_model $rt_model \\ + -in_oligo_params $rt_params \\ + -in_oligo_trainset $trainset \\ + -out_text:file ${prefix}.csv + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From b9c7a39e9ff1d04f89b5bed83cd4de2757eb200d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:08:02 +0100 Subject: [PATCH 116/227] Update openms_textexporter.nf --- modules/local/openms_textexporter.nf | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/modules/local/openms_textexporter.nf b/modules/local/openms_textexporter.nf index bbd04f14..24db7abe 100644 --- a/modules/local/openms_textexporter.nf +++ b/modules/local/openms_textexporter.nf @@ -24,23 +24,22 @@ process OPENMS_TEXTEXPORTER { output: tuple val(meta), path("*.tsv"), emit: tsv - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${meta.id}_${options.suffix}" : "${meta.id}" """ - TextExporter -in ${consensus_resolved} \\ - -out ${prefix}.tsv \\ - -threads ${task.cpus} \\ - -id:add_hit_metavalues 0 \\ - -id:add_metavalues 0 \\ - -id:peptides_only - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') - END_VERSIONS + TextExporter -in $consensus_resolved \\ + -out ${prefix}.tsv \\ + -threads $task.cpus \\ + -id:add_hit_metavalues 0 \\ + -id:add_metavalues 0 \\ + -id:peptides_only + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') + END_VERSIONS """ } From 326e54aa20c151a139234a075aa676003179f85d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:08:14 +0100 Subject: [PATCH 117/227] Update modules/local/openms_thermorawfileparser.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- modules/local/openms_thermorawfileparser.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_thermorawfileparser.nf b/modules/local/openms_thermorawfileparser.nf index 85318312..e2f8c59f 100644 --- a/modules/local/openms_thermorawfileparser.nf +++ b/modules/local/openms_thermorawfileparser.nf @@ -8,7 +8,7 @@ process OPENMS_THERMORAWFILEPARSER { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::thermorawfileparser::1.2.3" : null) + conda (params.enable_conda ? 
"bioconda::thermorawfileparser::1.3.4" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/thermorawfileparser:1.3.4--ha8f3691_0" } else { From 24d53bf0db6199dac901b47fc0e1e567ea9d5fef Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:08:38 +0100 Subject: [PATCH 118/227] Update openms_thermorawfileparser.nf --- modules/local/openms_thermorawfileparser.nf | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/modules/local/openms_thermorawfileparser.nf b/modules/local/openms_thermorawfileparser.nf index e2f8c59f..166cde7e 100644 --- a/modules/local/openms_thermorawfileparser.nf +++ b/modules/local/openms_thermorawfileparser.nf @@ -20,20 +20,19 @@ process OPENMS_THERMORAWFILEPARSER { output: tuple val(meta), path("*.mzML"), emit: mzml - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) def prefix = options.suffix ? 
"${rawfile.baseName}_${options.suffix}" : "${rawfile.baseName}" """ - ThermoRawFileParser.sh -i=${rawfile} \\ - -f=2 \\ - -b=${prefix}.mzML - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - thermorawfileparser: \$(ThermoRawFileParser.sh --version) - END_VERSIONS + ThermoRawFileParser.sh -i=$rawfile \\ + -f=2 \\ + -b=${prefix}.mzML + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + thermorawfileparser: \$(ThermoRawFileParser.sh --version) + END_VERSIONS """ } From 70aa32b59975f776e714bde7b4f1471c58ceb39c Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:09:27 +0100 Subject: [PATCH 119/227] Update predict_possible_class_2_neoepitopes.nf --- .../predict_possible_class_2_neoepitopes.nf | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/modules/local/predict_possible_class_2_neoepitopes.nf b/modules/local/predict_possible_class_2_neoepitopes.nf index 4011ebfc..bf78d38a 100644 --- a/modules/local/predict_possible_class_2_neoepitopes.nf +++ b/modules/local/predict_possible_class_2_neoepitopes.nf @@ -23,21 +23,20 @@ process PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES { tuple val(meta), val(alleles), path(vcf) output: - tuple val(meta), path("*.csv"), emit: csv + tuple val(meta), path("*.csv") , emit: csv tuple val(meta), path("${prefix}.txt"), emit: txt - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_vcf_neoepitopes_class2" """ - vcf_neoepitope_predictor.py -t ${params.variant_annotation_style} -r ${params.variant_reference} -a '${alleles}' -minl ${params.peptide_min_length} -maxl ${params.peptide_max_length} -v ${vcf} -o ${prefix}.csv - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) - END_VERSIONS + vcf_neoepitope_predictor.py -t ${params.variant_annotation_style} -r ${params.variant_reference} -a '$alleles' -minl ${params.peptide_min_length} -maxl ${params.peptide_max_length} -v $vcf -o ${prefix}.csv + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) + END_VERSIONS """ } From 0c0102c431a88b78046d4140418f9a2b1cd32e3b Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:10:04 +0100 Subject: [PATCH 120/227] Update modules/local/predict_possible_neoepitopes.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- modules/local/predict_possible_neoepitopes.nf | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git 
a/modules/local/predict_possible_neoepitopes.nf b/modules/local/predict_possible_neoepitopes.nf index fe6461da..aa9b959a 100644 --- a/modules/local/predict_possible_neoepitopes.nf +++ b/modules/local/predict_possible_neoepitopes.nf @@ -31,7 +31,13 @@ process PREDICT_POSSIBLE_NEOEPITOPES { def prefix = options.suffix ? "${meta}_${options.suffix}" : "${meta}_vcf_neoepitopes_class1" """ - vcf_neoepitope_predictor.py -t ${params.variant_annotation_style} -r ${params.variant_reference} -a '${alleles}' -minl ${params.peptide_min_length} -maxl ${params.peptide_max_length} -v ${vcf} -o ${prefix}.csv + vcf_neoepitope_predictor.py \\ + -t ${params.variant_annotation_style} \\ + -r ${params.variant_reference} \\ + -a '${alleles}' -minl ${params.peptide_min_length} \\ + -maxl ${params.peptide_max_length} \\ + -v $vcf \\ + -o ${prefix}.csv cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: From eb09137d95f45902554ca28b0c109984ab8ddb08 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:10:37 +0100 Subject: [PATCH 121/227] Update predict_possible_neoepitopes.nf --- modules/local/predict_possible_neoepitopes.nf | 29 +++++++++---------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/modules/local/predict_possible_neoepitopes.nf b/modules/local/predict_possible_neoepitopes.nf index aa9b959a..f4a67c58 100644 --- a/modules/local/predict_possible_neoepitopes.nf +++ b/modules/local/predict_possible_neoepitopes.nf @@ -25,25 +25,24 @@ process PREDICT_POSSIBLE_NEOEPITOPES { output: tuple val(meta), path("${prefix}.csv"), emit: csv tuple val(meta), path("${prefix}.txt"), emit: txt - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_vcf_neoepitopes_class1" """ - vcf_neoepitope_predictor.py \\ - -t ${params.variant_annotation_style} \\ - -r ${params.variant_reference} \\ - -a '${alleles}' -minl ${params.peptide_min_length} \\ - -maxl ${params.peptide_max_length} \\ - -v $vcf \\ - -o ${prefix}.csv - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) - fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) - END_VERSIONS + vcf_neoepitope_predictor.py \\ + -t ${params.variant_annotation_style} \\ + -r ${params.variant_reference} \\ + -a '$alleles' -minl ${params.peptide_min_length} \\ + -maxl ${params.peptide_max_length} \\ + -v $vcf \\ + -o ${prefix}.csv + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) + END_VERSIONS """ } From e66a0c9c9bf8e85fa34c0184e6e3e65693936ffd Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:11:41 +0100 Subject: [PATCH 122/227] Update resolve_found_class_2_neoepitopes.nf --- modules/local/resolve_found_class_2_neoepitopes.nf | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/modules/local/resolve_found_class_2_neoepitopes.nf 
b/modules/local/resolve_found_class_2_neoepitopes.nf index fa855187..d8c3f442 100644 --- a/modules/local/resolve_found_class_2_neoepitopes.nf +++ b/modules/local/resolve_found_class_2_neoepitopes.nf @@ -26,14 +26,16 @@ process RESOLVE_FOUND_CLASS_2_NEOEPITOPES { output: tuple val(meta), path("*.csv"), emit: csv - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta}_${options.suffix}" : "${meta}_found_neoepitopes_class_2" """ - resolve_neoepitopes.py -n ${neoepitopes} -m ${mztab} -f csv -o ${prefix} - + resolve_neoepitopes.py -n $neoepitopes \\ + -m $mztab \\ + -f csv \\ + -o ${prefix} cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 4449d33f3fb3abc37af0d2749324e3eb8ded7d2f Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:12:23 +0100 Subject: [PATCH 123/227] Update resolve_found_neoepitopes.nf --- modules/local/resolve_found_neoepitopes.nf | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/modules/local/resolve_found_neoepitopes.nf b/modules/local/resolve_found_neoepitopes.nf index bc8859c3..3ccde51a 100644 --- a/modules/local/resolve_found_neoepitopes.nf +++ b/modules/local/resolve_found_neoepitopes.nf @@ -24,19 +24,21 @@ process RESOLVE_FOUND_NEOEPITOPES { output: tuple val(meta), path("*.csv"), emit: csv - path "versions.yml", emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_found_neoepitopes_class_1" """ - resolve_neoepitopes.py -n ${neoepitopes} -m ${mztab} -f csv -o ${prefix} - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) - mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//')) - fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) - END_VERSIONS + resolve_neoepitopes.py -n $neoepitopes \\ + -m $mztab \\ + -f csv \\ + -o ${prefix} + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//')) + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) + END_VERSIONS """ } From abad0ac1cfc744aa0fcd6b897c9713eef5c042e3 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:12:51 +0100 Subject: [PATCH 124/227] Update samplesheet_check.nf --- modules/local/samplesheet_check.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/samplesheet_check.nf b/modules/local/samplesheet_check.nf index 5c846333..15a9665a 100644 --- a/modules/local/samplesheet_check.nf +++ b/modules/local/samplesheet_check.nf @@ -21,7 +21,7 @@ process SAMPLESHEET_CHECK { path samplesheet output: - path '*.csv', emit: csv + path '*.csv' , emit: csv path "versions.yml", emit: versions script: // This script is bundled with the pipeline, in 
nf-core/mhcquant/bin/ From 9b7871c4f534e257ceb52836607ee6ea2c8ad53a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:20:02 +0100 Subject: [PATCH 125/227] Update generate_proteins_from_vcf.nf --- modules/local/generate_proteins_from_vcf.nf | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/modules/local/generate_proteins_from_vcf.nf b/modules/local/generate_proteins_from_vcf.nf index f00f57dd..464655dd 100644 --- a/modules/local/generate_proteins_from_vcf.nf +++ b/modules/local/generate_proteins_from_vcf.nf @@ -33,7 +33,11 @@ process GENERATE_PROTEINS_FROM_VCF { def prefix = options.suffix ? "${fasta.baseName}_${options.suffix}" : "${fasta.baseName}_added_vcf" """ - variants2fasta.py -v $vcf -f $fasta -o $meta.sample_${prefix}.fasta $options.args + variants2fasta.py -v $vcf \\ + -f $fasta \\ + -o $meta.sample_${prefix}.fasta \\ + $options.args + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) From 5808f630f988c359728fd1dfab10a1b73217dc8f Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:20:53 +0100 Subject: [PATCH 126/227] Update mhcflurry_predictneoepitopesclass1.nf --- modules/local/mhcflurry_predictneoepitopesclass1.nf | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/local/mhcflurry_predictneoepitopesclass1.nf b/modules/local/mhcflurry_predictneoepitopesclass1.nf index c8cc5624..b964330a 100644 --- a/modules/local/mhcflurry_predictneoepitopesclass1.nf +++ b/modules/local/mhcflurry_predictneoepitopesclass1.nf @@ -24,8 +24,8 @@ process MHCFLURRY_PREDICTNEOEPITOPESCLASS1 { tuple val(meta), val(allotypes), path(neoepitopes) output: - tuple val(meta), path("*.csv") , emit: csv - path "versions.yml" 
, emit: versions + tuple val(meta), path("*.csv"), emit: csv + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${neoepitopes}_${meta}_${options.suffix}" : "${neoepitopes}_${meta}_predicted_neoepitopes_class_1" @@ -33,6 +33,7 @@ process MHCFLURRY_PREDICTNEOEPITOPESCLASS1 { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_neoepitope_binding_prediction.py '$allotypes' ${prefix}.csv + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From bb5ed686e145910ac0013d193d623005b46b0800 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:21:23 +0100 Subject: [PATCH 127/227] Update mhcflurry_predictpeptidesclass1.nf --- modules/local/mhcflurry_predictpeptidesclass1.nf | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index 751a8bf2..f25915d2 100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -23,8 +23,8 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { tuple val(meta), path(mztab), val(alleles) output: - tuple val(meta), path("*.csv") , emit: csv - path "versions.yml" , emit: versions + tuple val(meta), path("*.csv"), emit: csv + path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta.id}_${options.suffix}" : "${meta.id}_predicted_peptides_class_1" @@ -32,6 +32,7 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_predict_mztab.py '$alleles' $mztab ${prefix}.csv + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 66d31a7f278c3755ba64f576c7b67d798b2eb188 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:22:24 +0100 Subject: [PATCH 128/227] Update mhcflurry_predictpsms.nf --- modules/local/mhcflurry_predictpsms.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/mhcflurry_predictpsms.nf b/modules/local/mhcflurry_predictpsms.nf index 5d8beedc..95f54f5c 100644 --- a/modules/local/mhcflurry_predictpsms.nf +++ b/modules/local/mhcflurry_predictpsms.nf @@ -32,6 +32,7 @@ process MHCFLURRY_PREDICTPSMS { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_predict_mztab_for_filtering.py ${params.subset_affinity_threshold} '$allotypes' $perc_mztab $psm_mztab ${prefix}.idXML + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From a947b38a7d1c6982a963765fa99b77f4e4eac517 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:22:33 +0100 Subject: [PATCH 129/227] Update mhcnuggets_neoepitopesclass2post.nf --- modules/local/mhcnuggets_neoepitopesclass2post.nf | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2post.nf b/modules/local/mhcnuggets_neoepitopesclass2post.nf index 77d135d4..f6c47c94 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2post.nf +++ 
b/modules/local/mhcnuggets_neoepitopesclass2post.nf @@ -24,13 +24,14 @@ process MHCNUGGETS_NEOEPITOPESCLASS2POST { tuple val(meta), path(neoepitopes), path(predicted) output: - tuple val(meta), path("*.csv") , emit: csv - path "versions.yml" , emit: versions + tuple val(meta), path("*.csv"), emit: csv + path "versions.yml" , emit: versions script: """ postprocess_neoepitopes_mhcnuggets.py --input $predicted --neoepitopes $neoepitopes + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 5b33cf8b9afe54977f01233373fe7e38030e9310 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:23:49 +0100 Subject: [PATCH 130/227] Update mhcnuggets_neoepitopesclass2pre.nf --- modules/local/mhcnuggets_neoepitopesclass2pre.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index e0e0ecd0..6e585e80 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -27,6 +27,7 @@ process MHCNUGGETS_NEOEPITOPESCLASS2RE { """ preprocess_neoepitopes_mhcnuggets.py --neoepitopes $neoepitopes --output ${prefix} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 4ea2712709b9cc934b8eacc7a1ad4d5085f322ec Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:25:22 +0100 Subject: [PATCH 131/227] Update mhcnuggets_peptidesclass2post.nf --- modules/local/mhcnuggets_peptidesclass2post.nf | 9 ++++++--- 1 file changed, 6 
insertions(+), 3 deletions(-) diff --git a/modules/local/mhcnuggets_peptidesclass2post.nf b/modules/local/mhcnuggets_peptidesclass2post.nf index 8f24400e..be82de7d 100644 --- a/modules/local/mhcnuggets_peptidesclass2post.nf +++ b/modules/local/mhcnuggets_peptidesclass2post.nf @@ -23,14 +23,17 @@ process MHCNUGGETS_PEPTIDESCLASS2POST { tuple val(meta), path(peptides), path(peptide_to_geneID) output: - tuple val(meta), path('*.csv') , emit: csv - path "versions.yml" , emit: versions + tuple val(meta), path('*.csv'), emit: csv + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.sample}_${options.suffix}" : "${meta.sample}_postprocessed" """ - postprocess_peptides_mhcnuggets.py --input $peptides --peptides_seq_ID $peptide_to_geneID --output ${prefix}.csv + postprocess_peptides_mhcnuggets.py --input $peptides \\ + --peptides_seq_ID $peptide_to_geneID \\ + --output ${prefix}.csv + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From a8ee55b92c1ff5bab4dc44d7630126be905980d5 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:25:44 +0100 Subject: [PATCH 132/227] Update mhcnuggets_peptidesclass2pre.nf --- modules/local/mhcnuggets_peptidesclass2pre.nf | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index 1826b933..41c0796d 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -27,7 +27,9 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}_preprocessed_mhcnuggets_peptides" """ - preprocess_peptides_mhcnuggets.py --mztab $mztab --output ${prefix} + preprocess_peptides_mhcnuggets.py --mztab $mztab \\ + --output ${prefix} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 952fe3e377c268bc5dd0f2f667646199b777fd13 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:25:58 +0100 Subject: [PATCH 133/227] Update mhcnuggets_predictneoepitopesclass2.nf --- modules/local/mhcnuggets_predictneoepitopesclass2.nf | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_predictneoepitopesclass2.nf b/modules/local/mhcnuggets_predictneoepitopesclass2.nf index 2fb2be84..c3d6ad98 100644 --- a/modules/local/mhcnuggets_predictneoepitopesclass2.nf +++ b/modules/local/mhcnuggets_predictneoepitopesclass2.nf @@ -26,7 +26,10 @@ process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { def prefix = options.suffix ? 
"${meta}_${options.suffix}" : "${meta}_predicted_neoepitopes_class_2" """ - mhcnuggets_predict_peptides.py --peptides $neoepitopes --alleles '$alleles' --output ${prefix} + mhcnuggets_predict_peptides.py --peptides $neoepitopes \\ + --alleles '$alleles' \\ + --output ${prefix} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From ef62786a797ebf217692d0ae53e4b521c4dea107 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:27:13 +0100 Subject: [PATCH 134/227] Update mhcnuggets_predictpeptidesclass2.nf --- modules/local/mhcnuggets_predictpeptidesclass2.nf | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index 5d34afd1..9f9b26de 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -19,14 +19,17 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { tuple val(meta), path(peptides), val(alleles) output: - tuple val(meta), path("*${prefix}*"), emit: csv + tuple val(meta), path("${prefix}"), emit: csv path "versions.yml" , emit: versions script: def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}_predicted_peptides_class_2" """ - mhcnuggets_predict_peptides.py --peptides $peptides --alleles '$alleles' --output ${prefix} + mhcnuggets_predict_peptides.py --peptides $peptides \\ + --alleles '$alleles' \\ + --output ${prefix} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 2858ab55fcf1e76db4e77fe77161ef56d0ca9a5a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:28:45 +0100 Subject: [PATCH 135/227] Update openms_mapaligneridentification.nf --- modules/local/openms_mapaligneridentification.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_mapaligneridentification.nf b/modules/local/openms_mapaligneridentification.nf index 7fa67a02..80990efc 100644 --- a/modules/local/openms_mapaligneridentification.nf +++ b/modules/local/openms_mapaligneridentification.nf @@ -30,6 +30,7 @@ process OPENMS_MAPALIGNERIDENTIFICATION { MapAlignerIdentification -in $idxml \\ -trafo_out ${out_names} \\ $options.args + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 14ce911dd806618b7233cd91dc8de84eacacb2cd Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:29:02 +0100 Subject: [PATCH 136/227] Update openms_cometadapter.nf --- modules/local/openms_cometadapter.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_cometadapter.nf b/modules/local/openms_cometadapter.nf index b9c0d0d2..6322c8a1 100644 --- a/modules/local/openms_cometadapter.nf +++ b/modules/local/openms_cometadapter.nf @@ -32,6 +32,7 @@ process 
OPENMS_COMETADAPTER { -database $fasta \\ -threads $task.cpus \\ $options.args + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 64ea6b3a1fba5eb5dccde587d7990b699156de6a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:29:15 +0100 Subject: [PATCH 137/227] Update openms_decoydatabase.nf --- modules/local/openms_decoydatabase.nf | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/local/openms_decoydatabase.nf b/modules/local/openms_decoydatabase.nf index 25355aad..c3827f42 100644 --- a/modules/local/openms_decoydatabase.nf +++ b/modules/local/openms_decoydatabase.nf @@ -19,8 +19,8 @@ process OPENMS_DECOYDATABASE { tuple val(meta), path(fasta) output: - tuple val(meta), path("*.fasta") , emit: decoy - path "versions.yml" , emit: versions + tuple val(meta), path("*.fasta"), emit: decoy + path "versions.yml" , emit: versions script: def software = getSoftwareName(task.process) @@ -31,6 +31,7 @@ process OPENMS_DECOYDATABASE { -out ${prefix}.fasta \\ -decoy_string DECOY_ \\ -decoy_string_position prefix + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From d10a9ed2c0b3b86de5c10baa133c9f389144fffd Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:29:26 +0100 Subject: [PATCH 138/227] Update openms_falsediscoveryrate.nf --- modules/local/openms_falsediscoveryrate.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_falsediscoveryrate.nf b/modules/local/openms_falsediscoveryrate.nf index 4c9d1116..8010c4d0 100644 --- a/modules/local/openms_falsediscoveryrate.nf +++ b/modules/local/openms_falsediscoveryrate.nf @@ 
-31,6 +31,7 @@ process OPENMS_FALSEDISCOVERYRATE { -protein 'false' \\ -out ${prefix}.idXML \\ -threads $task.cpus + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From bd28e173a58eeed4f37afe4fcd3667a720f0bdef Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:29:55 +0100 Subject: [PATCH 139/227] Update openms_featurefinderidentification.nf --- modules/local/openms_featurefinderidentification.nf | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/modules/local/openms_featurefinderidentification.nf b/modules/local/openms_featurefinderidentification.nf index 76da91bb..b9d69b17 100644 --- a/modules/local/openms_featurefinderidentification.nf +++ b/modules/local/openms_featurefinderidentification.nf @@ -31,16 +31,17 @@ process OPENMS_FEATUREFINDERIDENTIFICATION { def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}" : "${meta.sample}_${meta.id}" if (!params.quantification_fdr){ - arguments = "-id ${id_quant}" + arguments = "-id $id_quant" } else { - arguments = "-id ${id_quant_int} -id_ext ${id_quant} -svm:min_prob ${params.quantification_min_prob}" + arguments = "-id $id_quant_int -id_ext $id_quant -svm:min_prob ${params.quantification_min_prob}" } """ - FeatureFinderIdentification -in ${mzml} \\ + FeatureFinderIdentification -in $mzml \\ -out ${prefix}.featureXML \\ - -threads ${task.cpus} \\ + -threads $task.cpus \\ $options.args + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From fa525757a2c47bc34ac828a5f999f3d94281c315 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:30:05 +0100 Subject: [PATCH 140/227] Update openms_featurelinkerunlabeledkd.nf --- modules/local/openms_featurelinkerunlabeledkd.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_featurelinkerunlabeledkd.nf b/modules/local/openms_featurelinkerunlabeledkd.nf index c123a743..14b47b16 100644 --- a/modules/local/openms_featurelinkerunlabeledkd.nf +++ b/modules/local/openms_featurelinkerunlabeledkd.nf @@ -34,6 +34,7 @@ process OPENMS_FEATURELINKERUNLABELEDKD { FeatureLinkerUnlabeledKD -in $features \\ -out '${prefix}.consensusXML' \\ -threads $task.cpus + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 81615ec366f7e155d82b510fb0ebb2657afa9643 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:30:16 +0100 Subject: [PATCH 141/227] Update openms_idconflictresolver.nf --- modules/local/openms_idconflictresolver.nf | 1 + 1 file changed, 1 
insertion(+) diff --git a/modules/local/openms_idconflictresolver.nf b/modules/local/openms_idconflictresolver.nf index f454cf6a..f44869cf 100644 --- a/modules/local/openms_idconflictresolver.nf +++ b/modules/local/openms_idconflictresolver.nf @@ -30,6 +30,7 @@ process OPENMS_IDCONFLICTRESOLVER { IDConflictResolver -in $consensus \\ -out ${prefix}.consensusXML \\ -threads $task.cpus + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From c52a8e9a7acaf5b0295871ace0ac8cc5f9176b8e Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:30:29 +0100 Subject: [PATCH 142/227] Update openms_idfilter.nf --- modules/local/openms_idfilter.nf | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/local/openms_idfilter.nf b/modules/local/openms_idfilter.nf index 9311623b..4662cc16 100644 --- a/modules/local/openms_idfilter.nf +++ b/modules/local/openms_idfilter.nf @@ -28,7 +28,7 @@ process OPENMS_IDFILTER { script: def software = getSoftwareName(task.process) - def whitelist = "${peptide_filter}" + def whitelist = "$peptide_filter" def prefix = options.suffix ? 
"${idxml.baseName}_${options.suffix}" : "${meta.id}_-_idx_fdr_filtered" if (whitelist == "input.2") { @@ -41,6 +41,7 @@ process OPENMS_IDFILTER { -threads $task.cpus \\ $options.args \\ $whitelist + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From cc0aee231a12f1dc4c4ee913ea5f26ed31830aeb Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:30:38 +0100 Subject: [PATCH 143/227] Update openms_idmerger.nf --- modules/local/openms_idmerger.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_idmerger.nf b/modules/local/openms_idmerger.nf index 674a3c44..deaa32bc 100644 --- a/modules/local/openms_idmerger.nf +++ b/modules/local/openms_idmerger.nf @@ -32,6 +32,7 @@ process OPENMS_IDMERGER { -threads $task.cpus \\ -annotate_file_origin \\ -merge_proteins_add_PSMs + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 882aae6a48f1f18f87ffcbd19137be23d602711f Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:32:40 +0100 Subject: [PATCH 144/227] Update predict_possible_neoepitopes.nf --- modules/local/predict_possible_neoepitopes.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/predict_possible_neoepitopes.nf b/modules/local/predict_possible_neoepitopes.nf index f4a67c58..4b7e950d 100644 --- a/modules/local/predict_possible_neoepitopes.nf +++ b/modules/local/predict_possible_neoepitopes.nf @@ -38,6 +38,7 @@ process PREDICT_POSSIBLE_NEOEPITOPES { -maxl ${params.peptide_max_length} \\ -v $vcf \\ -o ${prefix}.csv + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 
's/^mhcflurry //; s/ .*\$//') ) From 1f04ec0302aa6f44838af5e99e69d0d7d2df0ec8 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:32:48 +0100 Subject: [PATCH 145/227] Update openms_maprttransformer.nf --- modules/local/openms_maprttransformer.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_maprttransformer.nf b/modules/local/openms_maprttransformer.nf index 9490e7a1..a6597915 100644 --- a/modules/local/openms_maprttransformer.nf +++ b/modules/local/openms_maprttransformer.nf @@ -31,6 +31,7 @@ process OPENMS_MAPRTTRANSFORMER { -trafo_in $trafoxml \\ -out $meta.id_aligned.${fileExt} \\ -threads $task.cpus + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 653349a62451e49889607bb12335ff774798b90b Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:32:54 +0100 Subject: [PATCH 146/227] Update openms_mztabexporter.nf --- modules/local/openms_mztabexporter.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_mztabexporter.nf b/modules/local/openms_mztabexporter.nf index b4fde477..91bb7762 100644 --- a/modules/local/openms_mztabexporter.nf +++ b/modules/local/openms_mztabexporter.nf @@ -34,6 +34,7 @@ process OPENMS_MZTABEXPORTER { MzTabExporter -in $mztab \\ -out ${prefix}.mzTab \\ -threads $task.cpus + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From bb4879ee7ef91eaed1f103b5c1160d88c2251d78 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:33:04 +0100 Subject: [PATCH 147/227] Update openms_peakpickerhires.nf --- 
modules/local/openms_peakpickerhires.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_peakpickerhires.nf b/modules/local/openms_peakpickerhires.nf index 05f27873..0b1ff0cc 100644 --- a/modules/local/openms_peakpickerhires.nf +++ b/modules/local/openms_peakpickerhires.nf @@ -30,6 +30,7 @@ process OPENMS_PEAKPICKERHIRES { PeakPickerHiRes -in $mzml \\ -out ${prefix}.mzML \\ -algorithm:ms_levels ${params.pick_ms_levels} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From b6133f145b91639ee6a84c36faa962d8ad4ed228 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:33:11 +0100 Subject: [PATCH 148/227] Update openms_peptideindexer.nf --- modules/local/openms_peptideindexer.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_peptideindexer.nf b/modules/local/openms_peptideindexer.nf index d254102a..6f13d3c4 100644 --- a/modules/local/openms_peptideindexer.nf +++ b/modules/local/openms_peptideindexer.nf @@ -33,6 +33,7 @@ process OPENMS_PEPTIDEINDEXER { -fasta $fasta \\ -decoy_string DECOY \\ -enzyme:specificity none + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 4e948a78e8c3262b0a376e5e92ce262daa9965ff Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:33:19 +0100 Subject: [PATCH 149/227] Update openms_percolatoradapter.nf --- modules/local/openms_percolatoradapter.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_percolatoradapter.nf b/modules/local/openms_percolatoradapter.nf index dd4e374b..0e8eb256 100644 --- a/modules/local/openms_percolatoradapter.nf +++ 
b/modules/local/openms_percolatoradapter.nf @@ -35,6 +35,7 @@ process OPENMS_PERCOLATORADAPTER { PercolatorAdapter -in $psm \\ -out ${prefix}.idXML \\ $options.args + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From b9790b427ef0d204ec5e9e6062d23bd331672b3a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:33:27 +0100 Subject: [PATCH 150/227] Update openms_psmfeatureextractor.nf --- modules/local/openms_psmfeatureextractor.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_psmfeatureextractor.nf b/modules/local/openms_psmfeatureextractor.nf index 53dd55a1..63e8081d 100644 --- a/modules/local/openms_psmfeatureextractor.nf +++ b/modules/local/openms_psmfeatureextractor.nf @@ -34,6 +34,7 @@ process OPENMS_PSMFEATUREEXTRACTOR { PSMFeatureExtractor -in $merged \\ -out ${prefix}.idXML \\ -threads $task.cpus + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From f0ea89585b9bb7a628935b2c6c826cffd5c7a563 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:33:35 +0100 Subject: [PATCH 151/227] Update openms_rtmodel.nf --- modules/local/openms_rtmodel.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_rtmodel.nf b/modules/local/openms_rtmodel.nf index 45c702b9..2b97c570 100644 --- a/modules/local/openms_rtmodel.nf +++ b/modules/local/openms_rtmodel.nf @@ -32,6 +32,7 @@ process OPENMS_RTMODEL { -out ${prefix}_rt_training.txt \\ -out_oligo_params ${prefix}_params.paramXML \\ -out_oligo_trainset ${prefix}_trainset.txt + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 
2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 48dcfbd9ee86f476f691b7a3c7f5a0bf6fc7ff6d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:33:40 +0100 Subject: [PATCH 152/227] Update openms_rtpredict.nf --- modules/local/openms_rtpredict.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_rtpredict.nf b/modules/local/openms_rtpredict.nf index 1c82463d..7d8fb8cc 100644 --- a/modules/local/openms_rtpredict.nf +++ b/modules/local/openms_rtpredict.nf @@ -36,6 +36,7 @@ process OPENMS_RTPREDICT { -in_oligo_params $rt_params \\ -in_oligo_trainset $trainset \\ -out_text:file ${prefix}.csv + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 1e9bbd03a6ebc0de0da99e21ff9c50ac945df2a9 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:33:47 +0100 Subject: [PATCH 153/227] Update openms_textexporter.nf --- modules/local/openms_textexporter.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_textexporter.nf b/modules/local/openms_textexporter.nf index 24db7abe..23c915d5 100644 --- a/modules/local/openms_textexporter.nf +++ b/modules/local/openms_textexporter.nf @@ -37,6 +37,7 @@ process OPENMS_TEXTEXPORTER { -id:add_hit_metavalues 0 \\ -id:add_metavalues 0 \\ -id:peptides_only + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 77b941534e11396ae63da299bec037f16f43a5c2 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:34:16 +0100 Subject: [PATCH 154/227] Update predict_possible_class_2_neoepitopes.nf --- 
modules/local/predict_possible_class_2_neoepitopes.nf | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/modules/local/predict_possible_class_2_neoepitopes.nf b/modules/local/predict_possible_class_2_neoepitopes.nf index bf78d38a..edfcab65 100644 --- a/modules/local/predict_possible_class_2_neoepitopes.nf +++ b/modules/local/predict_possible_class_2_neoepitopes.nf @@ -31,7 +31,14 @@ process PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES { def prefix = options.suffix ? "${meta}_${options.suffix}" : "${meta}_vcf_neoepitopes_class2" """ - vcf_neoepitope_predictor.py -t ${params.variant_annotation_style} -r ${params.variant_reference} -a '$alleles' -minl ${params.peptide_min_length} -maxl ${params.peptide_max_length} -v $vcf -o ${prefix}.csv + vcf_neoepitope_predictor.py -t ${params.variant_annotation_style} \\ + -r ${params.variant_reference} \\ + -a '$alleles' \\ + -minl ${params.peptide_min_length} \\ + -maxl ${params.peptide_max_length} \\ + -v $vcf \\ + -o ${prefix}.csv + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 5db4068d3013e940ebf72fdc08e76da84c66f62d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:35:39 +0100 Subject: [PATCH 155/227] Update resolve_found_class_2_neoepitopes.nf --- modules/local/resolve_found_class_2_neoepitopes.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/resolve_found_class_2_neoepitopes.nf b/modules/local/resolve_found_class_2_neoepitopes.nf index d8c3f442..ca04a436 100644 --- a/modules/local/resolve_found_class_2_neoepitopes.nf +++ b/modules/local/resolve_found_class_2_neoepitopes.nf @@ -36,6 +36,7 @@ process RESOLVE_FOUND_CLASS_2_NEOEPITOPES { -m $mztab \\ -f csv \\ -o ${prefix} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | 
sed 's/^mhcflurry //; s/ .*\$//') ) From e60abbcbf21e57a6f2c176a765bd9082ffe7f260 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:35:44 +0100 Subject: [PATCH 156/227] Update resolve_found_neoepitopes.nf --- modules/local/resolve_found_neoepitopes.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/resolve_found_neoepitopes.nf b/modules/local/resolve_found_neoepitopes.nf index 3ccde51a..35466e88 100644 --- a/modules/local/resolve_found_neoepitopes.nf +++ b/modules/local/resolve_found_neoepitopes.nf @@ -34,6 +34,7 @@ process RESOLVE_FOUND_NEOEPITOPES { -m $mztab \\ -f csv \\ -o ${prefix} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 8274dc11ced93ae7d8e2d3348721d30bd6a3a488 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:38:15 +0100 Subject: [PATCH 157/227] Update mhcnuggets_predictneoepitopesclass2.nf --- modules/local/mhcnuggets_predictneoepitopesclass2.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_predictneoepitopesclass2.nf b/modules/local/mhcnuggets_predictneoepitopesclass2.nf index c3d6ad98..5e473625 100644 --- a/modules/local/mhcnuggets_predictneoepitopesclass2.nf +++ b/modules/local/mhcnuggets_predictneoepitopesclass2.nf @@ -19,7 +19,7 @@ process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { tuple val(meta), path(neoepitopes), val(alleles) output: - tuple val(meta), path("*${prefix}*"), emit: csv + tuple val(meta), path("*${prefix}"), emit: csv path "versions.yml" , emit: versions script: From 00631b1262011f64ed01b9a5f50853cb3ab82cc6 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 16:51:40 +0100 Subject: [PATCH 158/227] Update openms_maprttransformer.nf 
--- modules/local/openms_maprttransformer.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_maprttransformer.nf b/modules/local/openms_maprttransformer.nf index a6597915..c51f1911 100644 --- a/modules/local/openms_maprttransformer.nf +++ b/modules/local/openms_maprttransformer.nf @@ -29,7 +29,7 @@ process OPENMS_MAPRTTRANSFORMER { """ MapRTTransformer -in $alignment_file \\ -trafo_in $trafoxml \\ - -out $meta.id_aligned.${fileExt} \\ + -out ${meta.id}_aligned.${fileExt} \\ -threads $task.cpus cat <<-END_VERSIONS > versions.yml From 6d7fdb6c26978d27d1bc0e4b853a78dce30e6a2a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 17:04:31 +0100 Subject: [PATCH 159/227] Update openms_featurefinderidentification.nf --- modules/local/openms_featurefinderidentification.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/openms_featurefinderidentification.nf b/modules/local/openms_featurefinderidentification.nf index b9d69b17..39cc533e 100644 --- a/modules/local/openms_featurefinderidentification.nf +++ b/modules/local/openms_featurefinderidentification.nf @@ -40,6 +40,7 @@ process OPENMS_FEATUREFINDERIDENTIFICATION { FeatureFinderIdentification -in $mzml \\ -out ${prefix}.featureXML \\ -threads $task.cpus \\ + ${arguments} \\ $options.args cat <<-END_VERSIONS > versions.yml From 119903ead6c79d45b37995481b35cecd589fb7a3 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 17:14:09 +0100 Subject: [PATCH 160/227] Update mhcnuggets_neoepitopesclass2pre.nf --- modules/local/mhcnuggets_neoepitopesclass2pre.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index 6e585e80..d3cabe15 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ 
b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -19,7 +19,7 @@ process MHCNUGGETS_NEOEPITOPESCLASS2RE { tuple val(meta), path(neoepitopes) output: - tuple val(meta), path("*${prefix}*"), emit: preprocessed + tuple val(meta), path("*${prefix}"), emit: preprocessed path "versions.yml" , emit: versions script: From 4b06d98273cc2c974c77a217b36a039ecc83f05d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 17:15:05 +0100 Subject: [PATCH 161/227] Update mhcnuggets_peptidesclass2pre.nf --- modules/local/mhcnuggets_peptidesclass2pre.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index 41c0796d..7209a4ec 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -19,7 +19,7 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { tuple val(meta), path(mztab) output: - tuple val(meta), path("*${prefix}*") , emit: preprocessed + tuple val(meta), path("${prefix}") , emit: preprocessed tuple val(meta), path('*peptide_to_geneID*'), emit: geneID path "versions.yml" , emit: versions From e3f59efc615c6d4b071671272f81f44a386049db Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 17:23:24 +0100 Subject: [PATCH 162/227] Update mhcnuggets_peptidesclass2pre.nf --- modules/local/mhcnuggets_peptidesclass2pre.nf | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index 7209a4ec..d3f88bf3 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -19,12 +19,12 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { tuple val(meta), path(mztab) output: - tuple val(meta), path("${prefix}") , emit: preprocessed - tuple val(meta), 
path('*peptide_to_geneID*'), emit: geneID - path "versions.yml" , emit: versions + tuple val(meta), path("*_peptides") , emit: preprocessed + tuple val(meta), path('peptide_to_geneID'), emit: geneID + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.sample}_${options.suffix}" : "${meta.sample}_preprocessed_mhcnuggets_peptides" + def prefix = options.suffix ? "${meta.sample}_${options.suffix}_peptides" : "${meta.sample}_preprocessed_mhcnuggets_peptides" """ preprocess_peptides_mhcnuggets.py --mztab $mztab \\ From a79a9886f46714e3f857b2760b150288cf90ae0b Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 17:26:49 +0100 Subject: [PATCH 163/227] Update mhcnuggets_predictpeptidesclass2.nf --- modules/local/mhcnuggets_predictpeptidesclass2.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index 9f9b26de..9bdf89ed 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -19,11 +19,11 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { tuple val(meta), path(peptides), val(alleles) output: - tuple val(meta), path("${prefix}"), emit: csv - path "versions.yml" , emit: versions + tuple val(meta), path("*_class_2"), emit: csv + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.sample}_${options.suffix}" : "${meta.sample}_predicted_peptides_class_2" + def prefix = options.suffix ? 
"${meta.sample}_${options.suffix}_class_2" : "${meta.sample}_predicted_peptides_class_2" """ mhcnuggets_predict_peptides.py --peptides $peptides \\ From d49044504264de4d773a3ad330f0b8574636d9ed Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:24:29 +0100 Subject: [PATCH 164/227] Update generate_proteins_from_vcf.nf Removing trailing ws --- modules/local/generate_proteins_from_vcf.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/generate_proteins_from_vcf.nf b/modules/local/generate_proteins_from_vcf.nf index 464655dd..b07589de 100644 --- a/modules/local/generate_proteins_from_vcf.nf +++ b/modules/local/generate_proteins_from_vcf.nf @@ -37,7 +37,7 @@ process GENERATE_PROTEINS_FROM_VCF { -f $fasta \\ -o $meta.sample_${prefix}.fasta \\ $options.args - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) From 52de64dfe79a5232a9944c23d386a3b5735525c4 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:24:48 +0100 Subject: [PATCH 165/227] Update mhcflurry_predictneoepitopesclass1.nf --- modules/local/mhcflurry_predictneoepitopesclass1.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcflurry_predictneoepitopesclass1.nf b/modules/local/mhcflurry_predictneoepitopesclass1.nf index b964330a..37c938cc 100644 --- a/modules/local/mhcflurry_predictneoepitopesclass1.nf +++ b/modules/local/mhcflurry_predictneoepitopesclass1.nf @@ -33,7 +33,7 @@ process MHCFLURRY_PREDICTNEOEPITOPESCLASS1 { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_neoepitope_binding_prediction.py '$allotypes' ${prefix}.csv - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo 
\$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From f2bc841ae9a2332990d68de889400c67f3210f41 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:25:02 +0100 Subject: [PATCH 166/227] Update mhcflurry_predictpeptidesclass1.nf --- modules/local/mhcflurry_predictpeptidesclass1.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcflurry_predictpeptidesclass1.nf b/modules/local/mhcflurry_predictpeptidesclass1.nf index f25915d2..a5509ed0 100644 --- a/modules/local/mhcflurry_predictpeptidesclass1.nf +++ b/modules/local/mhcflurry_predictpeptidesclass1.nf @@ -32,7 +32,7 @@ process MHCFLURRY_PREDICTPEPTIDESCLASS1 { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_predict_mztab.py '$alleles' $mztab ${prefix}.csv - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 16e34316462d11901d25193fe333a57bf4e0f830 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:25:14 +0100 Subject: [PATCH 167/227] Update mhcflurry_predictpsms.nf --- modules/local/mhcflurry_predictpsms.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcflurry_predictpsms.nf b/modules/local/mhcflurry_predictpsms.nf index 95f54f5c..1cff8eb4 100644 --- a/modules/local/mhcflurry_predictpsms.nf +++ b/modules/local/mhcflurry_predictpsms.nf @@ -32,7 +32,7 @@ process MHCFLURRY_PREDICTPSMS { """ mhcflurry-downloads --quiet fetch models_class1 mhcflurry_predict_mztab_for_filtering.py ${params.subset_affinity_threshold} '$allotypes' $perc_mztab $psm_mztab ${prefix}.idXML - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict 
--version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 02560ed1205b410dccbd197f10c8bd395dd244f6 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:25:28 +0100 Subject: [PATCH 168/227] Update mhcnuggets_neoepitopesclass2post.nf --- modules/local/mhcnuggets_neoepitopesclass2post.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2post.nf b/modules/local/mhcnuggets_neoepitopesclass2post.nf index f6c47c94..90eeead8 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2post.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2post.nf @@ -31,7 +31,7 @@ process MHCNUGGETS_NEOEPITOPESCLASS2POST { """ postprocess_neoepitopes_mhcnuggets.py --input $predicted --neoepitopes $neoepitopes - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From cf0b6dc50337828088a87a6f0501f40e15d77eef Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:25:41 +0100 Subject: [PATCH 169/227] Update mhcnuggets_neoepitopesclass2pre.nf --- modules/local/mhcnuggets_neoepitopesclass2pre.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index d3cabe15..5a0bce70 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -27,7 +27,7 @@ process MHCNUGGETS_NEOEPITOPESCLASS2RE { """ preprocess_neoepitopes_mhcnuggets.py --neoepitopes $neoepitopes --output ${prefix} - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + 
pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From eea9cb1c525ac5750ebabf02bed5a8d24ed47961 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:25:58 +0100 Subject: [PATCH 170/227] Update mhcnuggets_peptidesclass2post.nf --- modules/local/mhcnuggets_peptidesclass2post.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_peptidesclass2post.nf b/modules/local/mhcnuggets_peptidesclass2post.nf index be82de7d..0af8cb0f 100644 --- a/modules/local/mhcnuggets_peptidesclass2post.nf +++ b/modules/local/mhcnuggets_peptidesclass2post.nf @@ -33,7 +33,7 @@ process MHCNUGGETS_PEPTIDESCLASS2POST { postprocess_peptides_mhcnuggets.py --input $peptides \\ --peptides_seq_ID $peptide_to_geneID \\ --output ${prefix}.csv - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 963c94807b16d6cebb03ebf265bc3bde42eb412d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:26:13 +0100 Subject: [PATCH 171/227] Update mhcnuggets_peptidesclass2pre.nf --- modules/local/mhcnuggets_peptidesclass2pre.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index d3f88bf3..efa902b0 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -29,7 +29,7 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { """ preprocess_peptides_mhcnuggets.py --mztab $mztab \\ --output ${prefix} - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + 
pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 44a4dee1f9499395ab7724010a69fe7f18345fde Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:26:35 +0100 Subject: [PATCH 172/227] Update mhcnuggets_predictneoepitopesclass2.nf --- modules/local/mhcnuggets_predictneoepitopesclass2.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_predictneoepitopesclass2.nf b/modules/local/mhcnuggets_predictneoepitopesclass2.nf index 5e473625..0aefe521 100644 --- a/modules/local/mhcnuggets_predictneoepitopesclass2.nf +++ b/modules/local/mhcnuggets_predictneoepitopesclass2.nf @@ -29,7 +29,7 @@ process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { mhcnuggets_predict_peptides.py --peptides $neoepitopes \\ --alleles '$alleles' \\ --output ${prefix} - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 3cb2ae76434d172d9e06a0862c3a54dc6441d46a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:26:52 +0100 Subject: [PATCH 173/227] Update mhcnuggets_predictpeptidesclass2.nf --- modules/local/mhcnuggets_predictpeptidesclass2.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index 9bdf89ed..38c29c66 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -29,7 +29,7 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { mhcnuggets_predict_peptides.py --peptides $peptides \\ --alleles '$alleles' \\ --output ${prefix} - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: 
mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) From 542c23b9c2cedf0fe329fdf3a4916f569305e135 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:27:06 +0100 Subject: [PATCH 174/227] Update openms_cometadapter.nf --- modules/local/openms_cometadapter.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_cometadapter.nf b/modules/local/openms_cometadapter.nf index 6322c8a1..ad49aa2b 100644 --- a/modules/local/openms_cometadapter.nf +++ b/modules/local/openms_cometadapter.nf @@ -32,7 +32,7 @@ process OPENMS_COMETADAPTER { -database $fasta \\ -threads $task.cpus \\ $options.args - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 41c338d25a2e8f707b31a2761741315b336bab49 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:27:22 +0100 Subject: [PATCH 175/227] Update openms_decoydatabase.nf --- modules/local/openms_decoydatabase.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_decoydatabase.nf b/modules/local/openms_decoydatabase.nf index c3827f42..2f9ebc4a 100644 --- a/modules/local/openms_decoydatabase.nf +++ b/modules/local/openms_decoydatabase.nf @@ -31,7 +31,7 @@ process OPENMS_DECOYDATABASE { -out ${prefix}.fasta \\ -decoy_string DECOY_ \\ -decoy_string_position prefix - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 3e4e7351ec1b8450cfe23fd3cae2640aa9e8543d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar 
<71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:27:38 +0100 Subject: [PATCH 176/227] Update openms_falsediscoveryrate.nf --- modules/local/openms_falsediscoveryrate.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_falsediscoveryrate.nf b/modules/local/openms_falsediscoveryrate.nf index 8010c4d0..c25d7058 100644 --- a/modules/local/openms_falsediscoveryrate.nf +++ b/modules/local/openms_falsediscoveryrate.nf @@ -31,7 +31,7 @@ process OPENMS_FALSEDISCOVERYRATE { -protein 'false' \\ -out ${prefix}.idXML \\ -threads $task.cpus - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 0021e753da8877d262c013185eff1cef8f74fb86 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:27:52 +0100 Subject: [PATCH 177/227] Update openms_featurefinderidentification.nf --- modules/local/openms_featurefinderidentification.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_featurefinderidentification.nf b/modules/local/openms_featurefinderidentification.nf index 39cc533e..eacb21a3 100644 --- a/modules/local/openms_featurefinderidentification.nf +++ b/modules/local/openms_featurefinderidentification.nf @@ -42,7 +42,7 @@ process OPENMS_FEATUREFINDERIDENTIFICATION { -threads $task.cpus \\ ${arguments} \\ $options.args - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 9e5f947c470cdcf112cb680f5b8cc8b6f30041ce Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:28:06 +0100 Subject: [PATCH 178/227] Update openms_featurelinkerunlabeledkd.nf --- 
modules/local/openms_featurelinkerunlabeledkd.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_featurelinkerunlabeledkd.nf b/modules/local/openms_featurelinkerunlabeledkd.nf index 14b47b16..ad8db0ba 100644 --- a/modules/local/openms_featurelinkerunlabeledkd.nf +++ b/modules/local/openms_featurelinkerunlabeledkd.nf @@ -34,7 +34,7 @@ process OPENMS_FEATURELINKERUNLABELEDKD { FeatureLinkerUnlabeledKD -in $features \\ -out '${prefix}.consensusXML' \\ -threads $task.cpus - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 6aa1b1c5ee286d1a8ee108c540e744f5490f9824 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:28:19 +0100 Subject: [PATCH 179/227] Update openms_idconflictresolver.nf --- modules/local/openms_idconflictresolver.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_idconflictresolver.nf b/modules/local/openms_idconflictresolver.nf index f44869cf..5053c53b 100644 --- a/modules/local/openms_idconflictresolver.nf +++ b/modules/local/openms_idconflictresolver.nf @@ -30,7 +30,7 @@ process OPENMS_IDCONFLICTRESOLVER { IDConflictResolver -in $consensus \\ -out ${prefix}.consensusXML \\ -threads $task.cpus - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From acb8edb4dd7c0ce45a3bdf6c4c878f96c999dd50 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:28:32 +0100 Subject: [PATCH 180/227] Update openms_idfilter.nf --- modules/local/openms_idfilter.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_idfilter.nf 
b/modules/local/openms_idfilter.nf index 4662cc16..b59321a0 100644 --- a/modules/local/openms_idfilter.nf +++ b/modules/local/openms_idfilter.nf @@ -41,7 +41,7 @@ process OPENMS_IDFILTER { -threads $task.cpus \\ $options.args \\ $whitelist - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 66fcdce5d176e1a7521f5f92a4f6eab0ec275681 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:28:45 +0100 Subject: [PATCH 181/227] Update openms_idmerger.nf --- modules/local/openms_idmerger.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_idmerger.nf b/modules/local/openms_idmerger.nf index deaa32bc..289d250b 100644 --- a/modules/local/openms_idmerger.nf +++ b/modules/local/openms_idmerger.nf @@ -32,7 +32,7 @@ process OPENMS_IDMERGER { -threads $task.cpus \\ -annotate_file_origin \\ -merge_proteins_add_PSMs - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 75976966fc909de4e9a215ca49926d8a3bef7c2d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:28:58 +0100 Subject: [PATCH 182/227] Update openms_mapaligneridentification.nf --- modules/local/openms_mapaligneridentification.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_mapaligneridentification.nf b/modules/local/openms_mapaligneridentification.nf index 80990efc..80ac9ba3 100644 --- a/modules/local/openms_mapaligneridentification.nf +++ b/modules/local/openms_mapaligneridentification.nf @@ -30,7 +30,7 @@ process OPENMS_MAPALIGNERIDENTIFICATION { MapAlignerIdentification -in $idxml \\ -trafo_out ${out_names} \\ $options.args - + cat 
<<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 02b508dcc08ead5c22e237497ed010e59d9f73f6 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:29:10 +0100 Subject: [PATCH 183/227] Update openms_maprttransformer.nf --- modules/local/openms_maprttransformer.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_maprttransformer.nf b/modules/local/openms_maprttransformer.nf index c51f1911..171bc744 100644 --- a/modules/local/openms_maprttransformer.nf +++ b/modules/local/openms_maprttransformer.nf @@ -31,7 +31,7 @@ process OPENMS_MAPRTTRANSFORMER { -trafo_in $trafoxml \\ -out ${meta.id}_aligned.${fileExt} \\ -threads $task.cpus - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 28260b9beafdafaec07b399971c846cfdc0d3e8c Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:29:28 +0100 Subject: [PATCH 184/227] Update openms_mztabexporter.nf --- modules/local/openms_mztabexporter.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_mztabexporter.nf b/modules/local/openms_mztabexporter.nf index 91bb7762..bfdcece6 100644 --- a/modules/local/openms_mztabexporter.nf +++ b/modules/local/openms_mztabexporter.nf @@ -34,7 +34,7 @@ process OPENMS_MZTABEXPORTER { MzTabExporter -in $mztab \\ -out ${prefix}.mzTab \\ -threads $task.cpus - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From f6faf347462c87b58a2492d5fd3b4cba2851a33a Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar 
<71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:29:41 +0100 Subject: [PATCH 185/227] Update openms_peakpickerhires.nf --- modules/local/openms_peakpickerhires.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_peakpickerhires.nf b/modules/local/openms_peakpickerhires.nf index 0b1ff0cc..76718e40 100644 --- a/modules/local/openms_peakpickerhires.nf +++ b/modules/local/openms_peakpickerhires.nf @@ -30,7 +30,7 @@ process OPENMS_PEAKPICKERHIRES { PeakPickerHiRes -in $mzml \\ -out ${prefix}.mzML \\ -algorithm:ms_levels ${params.pick_ms_levels} - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From bc77841bada4db7e4b9805480b7a42aea5ac4dd8 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:29:54 +0100 Subject: [PATCH 186/227] Update openms_peptideindexer.nf --- modules/local/openms_peptideindexer.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_peptideindexer.nf b/modules/local/openms_peptideindexer.nf index 6f13d3c4..935bfa23 100644 --- a/modules/local/openms_peptideindexer.nf +++ b/modules/local/openms_peptideindexer.nf @@ -33,7 +33,7 @@ process OPENMS_PEPTIDEINDEXER { -fasta $fasta \\ -decoy_string DECOY \\ -enzyme:specificity none - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 09694a48d26f4dbbcd98829cf13c989e2607b8e4 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:30:09 +0100 Subject: [PATCH 187/227] Update openms_percolatoradapter.nf --- modules/local/openms_percolatoradapter.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/modules/local/openms_percolatoradapter.nf b/modules/local/openms_percolatoradapter.nf index 0e8eb256..e9a07e5c 100644 --- a/modules/local/openms_percolatoradapter.nf +++ b/modules/local/openms_percolatoradapter.nf @@ -35,7 +35,7 @@ process OPENMS_PERCOLATORADAPTER { PercolatorAdapter -in $psm \\ -out ${prefix}.idXML \\ $options.args - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From e2e128aac5787914fc7abe3545634e5e8db17156 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:30:23 +0100 Subject: [PATCH 188/227] Update openms_psmfeatureextractor.nf --- modules/local/openms_psmfeatureextractor.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_psmfeatureextractor.nf b/modules/local/openms_psmfeatureextractor.nf index 63e8081d..bc8fc3e3 100644 --- a/modules/local/openms_psmfeatureextractor.nf +++ b/modules/local/openms_psmfeatureextractor.nf @@ -34,7 +34,7 @@ process OPENMS_PSMFEATUREEXTRACTOR { PSMFeatureExtractor -in $merged \\ -out ${prefix}.idXML \\ -threads $task.cpus - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From f8b01a79b0be8528754c6cd4e0b86b48cb74ee36 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:30:36 +0100 Subject: [PATCH 189/227] Update openms_rtmodel.nf --- modules/local/openms_rtmodel.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_rtmodel.nf b/modules/local/openms_rtmodel.nf index 2b97c570..79687b59 100644 --- a/modules/local/openms_rtmodel.nf +++ b/modules/local/openms_rtmodel.nf @@ -32,7 +32,7 @@ process OPENMS_RTMODEL { -out 
${prefix}_rt_training.txt \\ -out_oligo_params ${prefix}_params.paramXML \\ -out_oligo_trainset ${prefix}_trainset.txt - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From d79165116c286f9c23f3e74b99cdd63d2ca3bf9d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:30:48 +0100 Subject: [PATCH 190/227] Update openms_rtpredict.nf --- modules/local/openms_rtpredict.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_rtpredict.nf b/modules/local/openms_rtpredict.nf index 7d8fb8cc..31f95c9f 100644 --- a/modules/local/openms_rtpredict.nf +++ b/modules/local/openms_rtpredict.nf @@ -36,7 +36,7 @@ process OPENMS_RTPREDICT { -in_oligo_params $rt_params \\ -in_oligo_trainset $trainset \\ -out_text:file ${prefix}.csv - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms-thirdparty: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From f0ad674e81ce2e2fc8367b8b5b697d0b65ad60bc Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:31:01 +0100 Subject: [PATCH 191/227] Update openms_textexporter.nf --- modules/local/openms_textexporter.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms_textexporter.nf b/modules/local/openms_textexporter.nf index 23c915d5..075dc1d5 100644 --- a/modules/local/openms_textexporter.nf +++ b/modules/local/openms_textexporter.nf @@ -37,7 +37,7 @@ process OPENMS_TEXTEXPORTER { -id:add_hit_metavalues 0 \\ -id:add_metavalues 0 \\ -id:peptides_only - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: openms: \$(echo \$(FileInfo --help 2>&1) | sed 's/^.*Version: //; s/-.*\$//' | sed 's/ -*//; s/ .*\$//') From 
8ca2652826f87128b6b95b46c4c9e1b122d7a9b9 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:31:13 +0100 Subject: [PATCH 192/227] Update predict_possible_class_2_neoepitopes.nf --- modules/local/predict_possible_class_2_neoepitopes.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/predict_possible_class_2_neoepitopes.nf b/modules/local/predict_possible_class_2_neoepitopes.nf index edfcab65..11162f1c 100644 --- a/modules/local/predict_possible_class_2_neoepitopes.nf +++ b/modules/local/predict_possible_class_2_neoepitopes.nf @@ -38,7 +38,7 @@ process PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES { -maxl ${params.peptide_max_length} \\ -v $vcf \\ -o ${prefix}.csv - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 4e69765fe9cb6deafc896d2574b7b6c19f76f4cf Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:31:25 +0100 Subject: [PATCH 193/227] Update predict_possible_neoepitopes.nf --- modules/local/predict_possible_neoepitopes.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/predict_possible_neoepitopes.nf b/modules/local/predict_possible_neoepitopes.nf index 4b7e950d..e775e982 100644 --- a/modules/local/predict_possible_neoepitopes.nf +++ b/modules/local/predict_possible_neoepitopes.nf @@ -38,7 +38,7 @@ process PREDICT_POSSIBLE_NEOEPITOPES { -maxl ${params.peptide_max_length} \\ -v $vcf \\ -o ${prefix}.csv - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 9cbd9cc2d818d599e7445e07837e13283a30a9c0 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:31:43 
+0100 Subject: [PATCH 194/227] Update resolve_found_class_2_neoepitopes.nf --- modules/local/resolve_found_class_2_neoepitopes.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/resolve_found_class_2_neoepitopes.nf b/modules/local/resolve_found_class_2_neoepitopes.nf index ca04a436..64ac4bfd 100644 --- a/modules/local/resolve_found_class_2_neoepitopes.nf +++ b/modules/local/resolve_found_class_2_neoepitopes.nf @@ -36,7 +36,7 @@ process RESOLVE_FOUND_CLASS_2_NEOEPITOPES { -m $mztab \\ -f csv \\ -o ${prefix} - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 0571bac1f0e0fbfe668d76f8ea64bc5d3883e13f Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 26 Nov 2021 18:31:57 +0100 Subject: [PATCH 195/227] Update resolve_found_neoepitopes.nf --- modules/local/resolve_found_neoepitopes.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/resolve_found_neoepitopes.nf b/modules/local/resolve_found_neoepitopes.nf index 35466e88..ece53e31 100644 --- a/modules/local/resolve_found_neoepitopes.nf +++ b/modules/local/resolve_found_neoepitopes.nf @@ -34,7 +34,7 @@ process RESOLVE_FOUND_NEOEPITOPES { -m $mztab \\ -f csv \\ -o ${prefix} - + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) From 4f1def1273c26d7e689bebd65aaa8d91bbe310b3 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 11:33:24 +0100 Subject: [PATCH 196/227] Update awsfulltest.yml Included suggestion from @ggabernet : There have been some changes in this awsfulltest.yml as the tests were broken. 
Could you change at line 17: nf-core/tower-action@master to nf-core/tower-action@v2 Then line 20: bearer_token: ${{ secrets.TOWER_BEARER_TOKEN }} to access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} --- .github/workflows/awsfulltest.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml index 57e020ff..d5b5d1bb 100644 --- a/.github/workflows/awsfulltest.yml +++ b/.github/workflows/awsfulltest.yml @@ -14,10 +14,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Launch workflow via tower - uses: nf-core/tower-action@master + uses: nf-core/tower-action@v2 with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} - bearer_token: ${{ secrets.TOWER_BEARER_TOKEN }} + bearer_token: ${{ secrets.TOWER_BEARER_TOKEN }} to access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} pipeline: ${{ github.repository }} revision: ${{ github.sha }} @@ -27,4 +27,3 @@ jobs: "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/mhcquant/results-${{ github.sha }}" } profiles: '[ "test_full", "aws_tower" ]' - From 3dc91725a20e4ec66e3db021616ffa38cf8e5496 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 11:34:53 +0100 Subject: [PATCH 197/227] Update awsfulltest.yml --- .github/workflows/awsfulltest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml index d5b5d1bb..fa118c9f 100644 --- a/.github/workflows/awsfulltest.yml +++ b/.github/workflows/awsfulltest.yml @@ -17,7 +17,7 @@ jobs: uses: nf-core/tower-action@v2 with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} - bearer_token: ${{ secrets.TOWER_BEARER_TOKEN }} to access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} + access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} pipeline: ${{ github.repository }} revision: ${{ github.sha }} 
From 8cf82e12d8d69c692efe14ba0561b9c4de48d1b0 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 12:36:13 +0100 Subject: [PATCH 198/227] Update awsfulltest.yml --- .github/workflows/awsfulltest.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml index fa118c9f..02797c64 100644 --- a/.github/workflows/awsfulltest.yml +++ b/.github/workflows/awsfulltest.yml @@ -18,12 +18,13 @@ jobs: with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} - compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} + compute_env: ${{ secrets.TOWE R_COMPUTE_ENV }} pipeline: ${{ github.repository }} - revision: ${{ github.sha }} + revision: ${{ github.sha } } workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/mhcquant/work-${{ github.sha }} parameters: | { "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/mhcquant/results-${{ github.sha }}" } - profiles: '[ "test_full", "aws_tower" ]' + profiles: test,aws_tower + pre_run_script: 'export NXF_VER=21.10.3' From bc7f0d2442b20a7bddd9ea4d3ffc48c7631b1b7d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 12:48:00 +0100 Subject: [PATCH 199/227] Update awstest.yml Adjusted based on suggestions --- .github/workflows/awstest.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml index 1fb18487..acc6f70b 100644 --- a/.github/workflows/awstest.yml +++ b/.github/workflows/awstest.yml @@ -11,11 +11,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Launch workflow via tower - uses: nf-core/tower-action@master + uses: nf-core/tower-action@v2 with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} - bearer_token: ${{ secrets.TOWER_BEARER_TOKEN }} + access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} compute_env: ${{ 
secrets.TOWER_COMPUTE_ENV }} pipeline: ${{ github.repository }} revision: ${{ github.sha }} @@ -24,4 +24,5 @@ jobs: { "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/mhcquant/results-${{ github.sha }}" } - profiles: '[ "test", "aws_tower" ]' + profiles: test,aws_tower + pre_run_script: 'export NXF_VER=21.10.3' From 335b512b00bf260b03eb9b42bd00af1ad568f925 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 12:52:07 +0100 Subject: [PATCH 200/227] Update schema_input.json Change the patter for the condition --- assets/schema_input.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/schema_input.json b/assets/schema_input.json index 134d1b43..4b3654a5 100644 --- a/assets/schema_input.json +++ b/assets/schema_input.json @@ -18,7 +18,7 @@ }, "Condition": { "type": "string", - "pattern": "^\\S+\\.f(ast)?q\\.gz$", + "pattern": "^\S+-?", "errorMessage": "Sample condition must be provided and cannot contain spaces" }, "ReplicateFileName": { From 6db7db3f561099aa547cb37966c36027cdd689fd Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 12:52:47 +0100 Subject: [PATCH 201/227] Update schema_input.json Added an additional slash --- assets/schema_input.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/schema_input.json b/assets/schema_input.json index 4b3654a5..f8583477 100644 --- a/assets/schema_input.json +++ b/assets/schema_input.json @@ -18,7 +18,7 @@ }, "Condition": { "type": "string", - "pattern": "^\S+-?", + "pattern": "^\\S+-?", "errorMessage": "Sample condition must be provided and cannot contain spaces" }, "ReplicateFileName": { From 98153cce263ccb929fae8c7348c526d3b46b119f Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 12:53:43 +0100 Subject: [PATCH 202/227] Delete modules/nf-core/modules/fastqc 
directory --- modules/nf-core/modules/fastqc/functions.nf | 78 --------------------- modules/nf-core/modules/fastqc/main.nf | 54 -------------- modules/nf-core/modules/fastqc/meta.yml | 52 -------------- 3 files changed, 184 deletions(-) delete mode 100644 modules/nf-core/modules/fastqc/functions.nf delete mode 100644 modules/nf-core/modules/fastqc/main.nf delete mode 100644 modules/nf-core/modules/fastqc/meta.yml diff --git a/modules/nf-core/modules/fastqc/functions.nf b/modules/nf-core/modules/fastqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/nf-core/modules/fastqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nf-core/modules/fastqc/main.nf b/modules/nf-core/modules/fastqc/main.nf deleted file mode 100644 index 9f6cfc55..00000000 --- a/modules/nf-core/modules/fastqc/main.nf +++ /dev/null @@ -1,54 +0,0 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -process FASTQC { - tag "$meta.id" - label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - - conda (params.enable_conda ? 
"bioconda::fastqc=0.11.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0" - } else { - container "quay.io/biocontainers/fastqc:0.11.9--0" - } - - input: - tuple val(meta), path(reads) - - output: - tuple val(meta), path("*.html"), emit: html - tuple val(meta), path("*.zip") , emit: zip - path "versions.yml" , emit: versions - - script: - // Add soft-links to original FastQs for consistent naming in pipeline - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - if (meta.single_end) { - """ - [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz - fastqc $options.args --threads $task.cpus ${prefix}.fastq.gz - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) - END_VERSIONS - """ - } else { - """ - [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz - [ ! -f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz - fastqc $options.args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) - END_VERSIONS - """ - } -} diff --git a/modules/nf-core/modules/fastqc/meta.yml b/modules/nf-core/modules/fastqc/meta.yml deleted file mode 100644 index b09553a3..00000000 --- a/modules/nf-core/modules/fastqc/meta.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: fastqc -description: Run FastQC on sequenced reads -keywords: - - quality control - - qc - - adapters - - fastq -tools: - - fastqc: - description: | - FastQC gives general quality metrics about your reads. - It provides information about the quality score distribution - across your reads, the per base sequence content (%A/C/G/T). 
- You get information about adapter contamination and other - overrepresented sequences. - homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ - documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ - licence: ['GPL-2.0-only'] -input: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - - reads: - type: file - description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, - respectively. -output: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - - html: - type: file - description: FastQC report - pattern: "*_{fastqc.html}" - - zip: - type: file - description: FastQC report archive - pattern: "*_{fastqc.zip}" - - versions: - type: file - description: File containing software versions - pattern: "versions.yml" -authors: - - "@drpatelh" - - "@grst" - - "@ewels" - - "@FelixKrueger" From 9d025728e33d453c127e07c8617a3ee6de561519 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 12:54:46 +0100 Subject: [PATCH 203/227] Update modules.json Remove the fastqc module --- modules.json | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/modules.json b/modules.json index a817ae90..554d418c 100644 --- a/modules.json +++ b/modules.json @@ -6,12 +6,9 @@ "custom/dumpsoftwareversions": { "git_sha": "3aacd46da2b221ed47aaa05c413a828538d2c2ae" }, - "fastqc": { - "git_sha": "3aacd46da2b221ed47aaa05c413a828538d2c2ae" - }, "multiqc": { "git_sha": "3aacd46da2b221ed47aaa05c413a828538d2c2ae" } } } -} \ No newline at end of file +} From eade29f46b77aaceb8e1e2bfde0a9ce316358fde Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 13:33:07 +0100 Subject: [PATCH 204/227] Update resolve_found_class_2_neoepitopes.nf Add output 
dir --- modules/local/resolve_found_class_2_neoepitopes.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/resolve_found_class_2_neoepitopes.nf b/modules/local/resolve_found_class_2_neoepitopes.nf index 64ac4bfd..80fd21c5 100644 --- a/modules/local/resolve_found_class_2_neoepitopes.nf +++ b/modules/local/resolve_found_class_2_neoepitopes.nf @@ -10,7 +10,7 @@ process RESOLVE_FOUND_CLASS_2_NEOEPITOPES { publishDir "${params.outdir}", mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'.', publish_id:'') } + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } echo true From ef2c4e31ebd9ce74311b6f2f44d954180f8a50cf Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 13:41:41 +0100 Subject: [PATCH 205/227] Update mhcnuggets_neoepitopesclass2pre.nf Include results path --- modules/local/mhcnuggets_neoepitopesclass2pre.nf | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index 5a0bce70..c3169ec1 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -7,7 +7,11 @@ options = initOptions(params.options) process MHCNUGGETS_NEOEPITOPESCLASS2RE { tag "$meta" label 'process_low' - + + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } + conda (params.enable_conda ? 
"bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mhcnuggets:2.3.2--py_0" From b8ad69bf908ae294d36978df17fb405db4c1d85e Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 13:42:54 +0100 Subject: [PATCH 206/227] Update mhcnuggets_peptidesclass2pre.nf Include result path --- modules/local/mhcnuggets_peptidesclass2pre.nf | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index efa902b0..fa608347 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -8,6 +8,10 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { tag "$meta" label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } + conda (params.enable_conda ? 
"bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mhcnuggets:2.3.2--py_0" From 6a20112af7fe1079728158657326827b45e03dba Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 13:43:46 +0100 Subject: [PATCH 207/227] Update mhcnuggets_predictneoepitopesclass2.nf Include result path --- modules/local/mhcnuggets_predictneoepitopesclass2.nf | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/modules/local/mhcnuggets_predictneoepitopesclass2.nf b/modules/local/mhcnuggets_predictneoepitopesclass2.nf index 0aefe521..446ee885 100644 --- a/modules/local/mhcnuggets_predictneoepitopesclass2.nf +++ b/modules/local/mhcnuggets_predictneoepitopesclass2.nf @@ -8,6 +8,10 @@ process MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 { tag "$meta" label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } + conda (params.enable_conda ? 
"bioconda::mhcnuggets=2.3.2--py_0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mhcnuggets:2.3.2--py_0" From 7f231f82c4f6920c867f934d294ce4acf26b4508 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 13:45:04 +0100 Subject: [PATCH 208/227] Update mhcnuggets_predictpeptidesclass2.nf Include results path --- modules/local/mhcnuggets_predictpeptidesclass2.nf | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index 38c29c66..90db203f 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -8,6 +8,10 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { tag "$meta" label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } + conda (params.enable_conda ? 
"bioconda::fred2=2.0.6 bioconda::mhcflurry=1.4.3 bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mulled-v2-689ae0756dd82c61400782baaa8a7a1c2289930d:a9e10ca22d4cbcabf6b54f0fb5d766ea16bb171e-0" From bc11599d6a0a5428d2385464f85dd57d30886fef Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 13:51:12 +0100 Subject: [PATCH 209/227] Update mhcnuggets_neoepitopesclass2pre.nf remove trailing ws --- modules/local/mhcnuggets_neoepitopesclass2pre.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index c3169ec1..a4f03988 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -11,7 +11,7 @@ process MHCNUGGETS_NEOEPITOPESCLASS2RE { publishDir "${params.outdir}", mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } - + conda (params.enable_conda ? 
"bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mhcnuggets:2.3.2--py_0" From 9708f3ed069f45535405c4ceef6766abe00876f7 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 13:51:29 +0100 Subject: [PATCH 210/227] Update mhcnuggets_neoepitopesclass2pre.nf remove trailing ws --- modules/local/mhcnuggets_neoepitopesclass2pre.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_neoepitopesclass2pre.nf b/modules/local/mhcnuggets_neoepitopesclass2pre.nf index a4f03988..98d0f503 100644 --- a/modules/local/mhcnuggets_neoepitopesclass2pre.nf +++ b/modules/local/mhcnuggets_neoepitopesclass2pre.nf @@ -7,7 +7,7 @@ options = initOptions(params.options) process MHCNUGGETS_NEOEPITOPESCLASS2RE { tag "$meta" label 'process_low' - + publishDir "${params.outdir}", mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } From 6340d9543f5793c31f3de777079f6d8265427243 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 13:52:17 +0100 Subject: [PATCH 211/227] Update mhcnuggets_peptidesclass2pre.nf removing trailing ws --- modules/local/mhcnuggets_peptidesclass2pre.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_peptidesclass2pre.nf b/modules/local/mhcnuggets_peptidesclass2pre.nf index fa608347..665749e9 100644 --- a/modules/local/mhcnuggets_peptidesclass2pre.nf +++ b/modules/local/mhcnuggets_peptidesclass2pre.nf @@ -11,7 +11,7 @@ process MHCNUGGETS_PEPTIDESCLASS2PRE { publishDir "${params.outdir}", mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, 
publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } - + conda (params.enable_conda ? "bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mhcnuggets:2.3.2--py_0" From 03e7d4c2972ffcd289b0debb93475d872568a69d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 13:52:46 +0100 Subject: [PATCH 212/227] Update mhcnuggets_predictpeptidesclass2.nf Remove trailing ws --- modules/local/mhcnuggets_predictpeptidesclass2.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index 90db203f..8ab9897f 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -11,7 +11,7 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { publishDir "${params.outdir}", mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'class_2_bindings', publish_id:'class_2_bindings') } - + conda (params.enable_conda ? 
"bioconda::fred2=2.0.6 bioconda::mhcflurry=1.4.3 bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/mulled-v2-689ae0756dd82c61400782baaa8a7a1c2289930d:a9e10ca22d4cbcabf6b54f0fb5d766ea16bb171e-0" From ed133a7f991721f089bc59cc06ef05915e516501 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 15:26:22 +0100 Subject: [PATCH 213/227] Update output.md Update the output file, since there were some changes during the PR --- docs/output.md | 39 ++++++++++++++++++++++++++++++++------- 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/docs/output.md b/docs/output.md index b5cb02fc..fc20548d 100644 --- a/docs/output.md +++ b/docs/output.md @@ -22,6 +22,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d * [Class (1|2) bindings](#class-12-bindings) * [Rotation time prediction](#rotation-time-prediction) * [Workflow reporting and documentation](#workflow-reporting-and-documentation) + * [MultiQC](#multiqc) * [Pipeline information](#pipeline-information) ## General @@ -31,7 +32,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d
Output files -* `csv` : If `--skip_quantification` is not specified. +* `*.tsv` : If `--skip_quantification` is not specified.
@@ -122,8 +123,11 @@ These CSV files list all of the theoretically possible neoepitope sequences from
Output files -* `*found_neoepitopes_class1.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_1` are specified -* `*found_neoepitopes_class2.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_2` are specified +* `class_1_bindings/` + * `*found_neoepitopes_class1.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_1` are specified + +* `class_2_bindings/` + * `*found_neoepitopes_class2.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_2` are specified
@@ -139,8 +143,11 @@ peptide sequence geneID
Output files -* `*vcf_neoepitopes_class1.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_1` are specified -* `*vcf_neoepitopes_class2.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_2` are specified +* `class_1_bindings/` + * `*vcf_neoepitopes_class1.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_1` are specified + +* `class_2_bindings/` + * `*vcf_neoepitopes_class2.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_2` are specified
@@ -158,8 +165,11 @@ Sequence Antigen ID Variants
Output files -* `*predicted_peptides_class_1.csv`: If `--predict_class_1` is specified, then this CSV is generated -* `*predicted_peptides_class_2.csv`: If `--predict_class_2` is specified, then this CSV is generated +* `class_1_bindings/` + * `*predicted_peptides_class_1.csv`: If `--predict_class_1` is specified, then this CSV is generated + +* `class_2_bindings/` + * `*predicted_peptides_class_2.csv`: If `--predict_class_2` is specified, then this CSV is generated
@@ -183,6 +193,21 @@ peptide allele prediction prediction_low prediction_high prediction_pe ## Workflow reporting and documentation +### MultiQC +
+Output files + +* `multiqc/` + * `multiqc_report.html`: a standalone HTML file that can be viewed in your web browser. + * `multiqc_data/`: directory containing parsed statistics from the different tools used in the pipeline. +
+ +MultiQC is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. + +The pipeline has special steps which allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see http://multiqc.info. + + + ### Pipeline information
From 7afe602518a0e91b2819afcacc123e579c40d883 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Fri, 3 Dec 2021 15:32:29 +0100 Subject: [PATCH 214/227] Update output.md --- docs/output.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/output.md b/docs/output.md index fc20548d..38e993f9 100644 --- a/docs/output.md +++ b/docs/output.md @@ -144,7 +144,7 @@ peptide sequence geneID Output files * `class_1_bindings/` - * `*vcf_neoepitopes_class1.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_1` are specified + * `*vcf_neoepitopes_class1.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_1` are specified * `class_2_bindings/` * `*vcf_neoepitopes_class2.csv`: Generated when `--include_proteins_from_vcf` and `--predict_class_2` are specified @@ -166,7 +166,7 @@ Sequence Antigen ID Variants Output files * `class_1_bindings/` - * `*predicted_peptides_class_1.csv`: If `--predict_class_1` is specified, then this CSV is generated + * `*predicted_peptides_class_1.csv`: If `--predict_class_1` is specified, then this CSV is generated * `class_2_bindings/` * `*predicted_peptides_class_2.csv`: If `--predict_class_2` is specified, then this CSV is generated @@ -194,19 +194,19 @@ peptide allele prediction prediction_low prediction_high prediction_pe ## Workflow reporting and documentation ### MultiQC +
Output files * `multiqc/` * `multiqc_report.html`: a standalone HTML file that can be viewed in your web browser. * `multiqc_data/`: directory containing parsed statistics from the different tools used in the pipeline. +
MultiQC is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. -The pipeline has special steps which allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see http://multiqc.info. - - +The pipeline has special steps which allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see . ### Pipeline information From 2fe61b224349da9563cbd63a0c0e44873b1e36b1 Mon Sep 17 00:00:00 2001 From: Gisela Gabernet Date: Wed, 8 Dec 2021 16:05:13 +0100 Subject: [PATCH 215/227] Update .github/workflows/awsfulltest.yml --- .github/workflows/awsfulltest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml index 02797c64..6c6c71d7 100644 --- a/.github/workflows/awsfulltest.yml +++ b/.github/workflows/awsfulltest.yml @@ -18,7 +18,7 @@ jobs: with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} - compute_env: ${{ secrets.TOWE R_COMPUTE_ENV }} + compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} pipeline: ${{ github.repository }} revision: ${{ github.sha } } workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/mhcquant/work-${{ github.sha }} From 253cd1474758f55e8aaa71c8f445e8c716ac8810 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Wed, 8 Dec 2021 17:12:23 +0100 Subject: [PATCH 216/227] Update mhcnuggets_predictpeptidesclass2.nf Included mhcflurry --- modules/local/mhcnuggets_predictpeptidesclass2.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/local/mhcnuggets_predictpeptidesclass2.nf b/modules/local/mhcnuggets_predictpeptidesclass2.nf index 
8ab9897f..f85c8208 100644 --- a/modules/local/mhcnuggets_predictpeptidesclass2.nf +++ b/modules/local/mhcnuggets_predictpeptidesclass2.nf @@ -36,6 +36,7 @@ process MHCNUGGETS_PREDICTPEPTIDESCLASS2 { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: + mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) END_VERSIONS From 5b140551920f2602613132def7cc97bac725317c Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Wed, 8 Dec 2021 17:13:08 +0100 Subject: [PATCH 217/227] Update ci.yml Change the nxf_ver --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c7488714..510e8f01 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: # Nextflow versions: check pipeline minimum and current latest - nxf_ver: ['21.04.0', ''] + nxf_ver: ['21.10.3', ''] steps: - name: Check out pipeline code uses: actions/checkout@v2 From 6f203d3522085735ea6e6ddc10e12c206473ead6 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Wed, 8 Dec 2021 17:33:44 +0100 Subject: [PATCH 218/227] Update nextflow.config Change version --- nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nextflow.config b/nextflow.config index d414d6d6..f895cfc9 100644 --- a/nextflow.config +++ b/nextflow.config @@ -213,7 +213,7 @@ manifest { homePage = 'https://github.com/nf-core/mhcquant' description = 'Identify and quantify peptides from mass spectrometry raw 
data' mainScript = 'main.nf' - nextflowVersion = '!>=21.04.0' + nextflowVersion = '!>=21.10.3' version = '2.1.0' } From 58aabe48e5c793a00bc329bba583f8233d248c16 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Wed, 8 Dec 2021 17:34:40 +0100 Subject: [PATCH 219/227] Update README.md Change the nextflow version --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index d7698ca8..2a9651db 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ [![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/mhcquant/results) [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.5407955-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.5407955) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.04.0-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -35,7 +35,7 @@ On release, automated continuous integration tests run the pipeline on a full-si ## Quick Start -1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.04.0`) +1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=21.10.3`) 2. 
Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(please only use [`Conda`](https://conda.io/miniconda.html) as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_. From ca6506e27608bbb58d0566608d2f82aa4e6b394e Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Wed, 8 Dec 2021 19:23:34 +0100 Subject: [PATCH 220/227] Update mhcquant.nf fixed indentation --- workflows/mhcquant.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/workflows/mhcquant.nf b/workflows/mhcquant.nf index a2ce1035..8b6e140f 100644 --- a/workflows/mhcquant.nf +++ b/workflows/mhcquant.nf @@ -134,15 +134,15 @@ include { OPENMS_TEXTEXPORTER } from '../modules/loc include { OPENMS_MZTABEXPORTER } from '../modules/local/openms_mztabexporter' addParams( options: [:] ) include { MHCFLURRY_PREDICTPEPTIDESCLASS1 } from '../modules/local/mhcflurry_predictpeptidesclass1' addParams( options: [:] ) -include { MHCNUGGETS_PEPTIDESCLASS2PRE } from '../modules/local/mhcnuggets_peptidesclass2pre' addParams( options: [:] ) -include { MHCNUGGETS_PREDICTPEPTIDESCLASS2 } from '../modules/local/mhcnuggets_predictpeptidesclass2' addParams( options: [:] ) +include { MHCNUGGETS_PEPTIDESCLASS2PRE } from '../modules/local/mhcnuggets_peptidesclass2pre' addParams( options: [:] ) +include { MHCNUGGETS_PREDICTPEPTIDESCLASS2 } from '../modules/local/mhcnuggets_predictpeptidesclass2' addParams( options: [:] ) include { MHCNUGGETS_PEPTIDESCLASS2POST } from '../modules/local/mhcnuggets_peptidesclass2post' addParams( options: [:] ) include { PREDICT_POSSIBLE_NEOEPITOPES } from '../modules/local/predict_possible_neoepitopes' addParams( 
options: [:] ) include { PREDICT_POSSIBLE_CLASS_2_NEOEPITOPES } from '../modules/local/predict_possible_class_2_neoepitopes' addParams( options: [:] ) include { RESOLVE_FOUND_NEOEPITOPES } from '../modules/local/resolve_found_neoepitopes' addParams( options: [:] ) include { RESOLVE_FOUND_CLASS_2_NEOEPITOPES } from '../modules/local/resolve_found_class_2_neoepitopes' addParams( options: [:] ) include { MHCFLURRY_PREDICTNEOEPITOPESCLASS1 } from '../modules/local/mhcflurry_predictneoepitopesclass1' addParams( options: [:] ) -include { MHCNUGGETS_NEOEPITOPESCLASS2RE } from '../modules/local/mhcnuggets_neoepitopesclass2pre' addParams( options: [:] ) +include { MHCNUGGETS_NEOEPITOPESCLASS2RE } from '../modules/local/mhcnuggets_neoepitopesclass2pre' addParams( options: [:] ) include { MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 } from '../modules/local/mhcnuggets_predictneoepitopesclass2' addParams( options: [:] ) include { MHCNUGGETS_NEOEPITOPESCLASS2POST } from '../modules/local/mhcnuggets_neoepitopesclass2post' addParams( options: [:] ) From 8929f198ad488614be2fef17194d81b3e75b515d Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Wed, 8 Dec 2021 19:35:48 +0100 Subject: [PATCH 221/227] Update CHANGELOG.md add date --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 77ac83b1..e90f6ee1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # nf-core/mhcquant: Changelog -## v2.1.0 nf-core/mhcquant "Olive Tin Hamster" - 2021/MM/DD +## v2.1.0 nf-core/mhcquant "Olive Tin Hamster" - 2021/12/08 ### `Added` From 099f3f748bc5a72210acfb11be16194c35141bb3 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Thu, 9 Dec 2021 13:53:22 +0100 Subject: [PATCH 222/227] Update modules.config Include modules for refine process --- conf/modules.config | 43 ++++++++++++++++++++++++++++++++++++------- 1 file changed, 36 
insertions(+), 7 deletions(-) diff --git a/conf/modules.config b/conf/modules.config index 0c837d89..bed18e01 100644 --- a/conf/modules.config +++ b/conf/modules.config @@ -23,6 +23,10 @@ params { modules { + 'generate_proteins_from_vcf' { + args= "-t ${params.variant_annotation_style} -r ${params.variant_reference}" + } + 'multiqc' { args = '' publish_dir = "multiqc/" @@ -36,20 +40,45 @@ params { args = "-precursor_mass_tolerance ${params.precursor_mass_tolerance} -fragment_mass_tolerance ${params.fragment_mass_tolerance} -fragment_bin_offset ${params.fragment_bin_offset} -num_hits ${params.num_hits} -digest_mass_range ${params.digest_mass_range} -max_variable_mods_in_peptide ${params.number_mods} -missed_cleavages 0 -precursor_charge ${params.prec_charge} -activation_method ${params.activation_method} -variable_modifications ${params.variable_mods.tokenize(',').collect { "'${it}'" }.join(" ") } -fixed_modifications ${params.fixed_mods.tokenize(',').collect { "'${it}'"}.join(" ")} -enzyme '${params.enzyme}' -spectrum_batch_size ${params.spectrum_batch_size} " } - 'generate_proteins_from_vcf' { - args= "-t ${params.variant_annotation_style} -r ${params.variant_reference}" + 'openms_id_filter' { + args = "-remove_decoys -precursor:length '${params.peptide_min_length}:${params.peptide_max_length}' -delete_unreferenced_peptide_hits " + } + + 'openms_id_filter_refiner' { + args = "-remove_decoys -precursor:length '${params.peptide_min_length}:${params.peptide_max_length}' -delete_unreferenced_peptide_hits " + suffix = "perc_subset_filtered" } - 'percolator_adapter' { + 'openms_id_filter_whitelist' { + args = "-whitelist:ignore_modifications -whitelist:peptides " + suffix = "pred_filtered" + } + + 'openms_mztab_exporter_perc' { + suffix = "all_ids_merged_psm_perc_filtered" + } + + 'openms_mztab_exporter_psm' { + suffix = "all_ids_merged" + } + + 'openms_percolator_adapter' { args = "-seed 4711 -trainFDR 0.05 -testFDR 0.05 -enzyme no_enzyme -subset_max_train 
${params.subset_max_train} -doc ${params.description_correct_features} " + suffix = "all_ids_merged_psm_perc" } - 'id_filter' { - args = "-remove_decoys -precursor:length '${params.peptide_min_length}:${params.peptide_max_length}' -delete_unreferenced_peptide_hits " + 'openms_percolator_adapter_refine' { + args = "-seed 4711 -trainFDR 0.05 -testFDR 0.05 -enzyme no_enzyme -subset_max_train ${params.subset_max_train} -doc ${params.description_correct_features} " + suffix = "perc_subset" } - 'id_filter_whitelist' { - args = "-whitelist:ignore_modifications -whitelist:peptides " + 'openms_rt_predict_peptides' { + suffix = "_id_files_for_rt_prediction_RTpredicted" } + + 'openms_rt_predict_neo_epitopes' { + suffix = "_txt_file_for_rt_prediction_RTpredicted" + } + } } From 740c27834fcb597d2d653c9caa00198b5f33ea58 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Thu, 9 Dec 2021 13:54:16 +0100 Subject: [PATCH 223/227] Update refine_fdr_on_predicted_subset.nf --- .../local/refine_fdr_on_predicted_subset.nf | 41 +++++++++---------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/subworkflows/local/refine_fdr_on_predicted_subset.nf b/subworkflows/local/refine_fdr_on_predicted_subset.nf index d39e1f4f..b786d649 100644 --- a/subworkflows/local/refine_fdr_on_predicted_subset.nf +++ b/subworkflows/local/refine_fdr_on_predicted_subset.nf @@ -3,27 +3,24 @@ * that are called when the paramater "refine_fdr_on_predicted_subset" is provided */ -// VALIDATED (EQUAL TO THE OLD CODE) - -params.percolator_adapter_options = [:] -params.filter_options = [:] +params.exporter_prec_options = [:] +params.exporter_psm_options = [:] +params.percolator_adapter_refine_options = [:] params.whitelist_filter_options = [:] +params.filter_options = [:] -def percolator_adapter_options = params.percolator_adapter_options.clone() -percolator_adapter_options.suffix = "perc_subset" - -def filter_psms_options = 
params.whitelist_filter_options.clone() -def filter_refined_qvalue_options = params.filter_options.clone() - -filter_psms_options.suffix = "pred_filtered" -filter_refined_qvalue_options.suffix = "perc_subset_filtered" +def openms_mztab_exporter_prec_options = params.exporter_prec_options.clone() +def openms_mztab_exporter_psm_options = params.exporter_psm_options.clone() +def openms_percolator_adapter_options = params.percolator_adapter_refine_options.clone() +def openms_id_filter_psms_options = params.whitelist_filter_options.clone() +def openms_id_filter_qvalue_options = params.filter_options.clone() -include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPERC } from '../../modules/local/openms_mztabexporter' addParams( options: [ suffix: "all_ids_merged_psm_perc_filtered" ] ) -include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPSM } from '../../modules/local/openms_mztabexporter' addParams( options: [ suffix: "all_ids_merged" ] ) +include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPERC } from '../../modules/local/openms_mztabexporter' addParams( options: openms_mztab_exporter_prec_options ) +include { OPENMS_MZTABEXPORTER as OPENMS_MZTABEXPORTERPSM } from '../../modules/local/openms_mztabexporter' addParams( options: openms_mztab_exporter_psm_options ) include { MHCFLURRY_PREDICTPSMS } from '../../modules/local/mhcflurry_predictpsms' addParams( options: [:] ) -include { OPENMS_PERCOLATORADAPTER } from '../../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_options ) -include { OPENMS_IDFILTER as OPENMS_IDFILTER_PSMS } from '../../modules/local/openms_idfilter' addParams( options: filter_psms_options ) -include { OPENMS_IDFILTER as OPENMS_IDFILTER_REFINED } from '../../modules/local/openms_idfilter' addParams( options: filter_refined_qvalue_options ) +include { OPENMS_PERCOLATORADAPTER } from '../../modules/local/openms_percolatoradapter' addParams( options: openms_percolator_adapter_options ) +include { OPENMS_IDFILTER as 
OPENMS_IDFILTER_PSMS } from '../../modules/local/openms_idfilter' addParams( options: openms_id_filter_psms_options ) +include { OPENMS_IDFILTER as OPENMS_IDFILTER_REFINED } from '../../modules/local/openms_idfilter' addParams( options: openms_id_filter_qvalue_options ) workflow REFINE_FDR_ON_PREDICTED_SUBSET { // Define the input parameters @@ -33,7 +30,7 @@ workflow REFINE_FDR_ON_PREDICTED_SUBSET { classI_alleles main: - ch_software_versions = Channel.empty() + ch_versions = Channel.empty() // Export filtered percolator results as mztab OPENMS_MZTABEXPORTERPERC( filtered_perc_output ) ch_versions = ch_versions.mix(OPENMS_MZTABEXPORTERPERC.out.versions) @@ -41,17 +38,17 @@ workflow REFINE_FDR_ON_PREDICTED_SUBSET { OPENMS_MZTABEXPORTERPSM( psm_features ) ch_versions = ch_versions.mix(OPENMS_MZTABEXPORTERPSM.out.versions) // Predict psm results using mhcflurry to shrink search space - PREDICT_PSMS( + MHCFLURRY_PREDICTPSMS( OPENMS_MZTABEXPORTERPERC.out.mztab .join( OPENMS_MZTABEXPORTERPSM.out.mztab, by:[0] ) .map{ it -> [it[0].sample, it[0], it[1], it[2]] } .combine( classI_alleles, by:0) .map(it -> [it[1], it[2], it[3], it[4]]) ) - ch_versions = ch_versions.mix(PREDICT_PSMS.out.versions) + ch_versions = ch_versions.mix(MHCFLURRY_PREDICTPSMS.out.versions) // Filter psm results by shrinked search space - OPENMS_IDFILTER_PSMS(psm_features.combine( PREDICT_PSMS.out.idxml, by: [0] )) + OPENMS_IDFILTER_PSMS(psm_features.combine( MHCFLURRY_PREDICTPSMS.out.idxml, by: [0] )) ch_versions = ch_versions.mix(OPENMS_IDFILTER_PSMS.out.versions) // Recompute percolator fdr on shrinked search space OPENMS_PERCOLATORADAPTER( OPENMS_IDFILTER_PSMS.out.idxml ) @@ -62,5 +59,5 @@ workflow REFINE_FDR_ON_PREDICTED_SUBSET { emit: // Define the information that is returned by this workflow filter_refined_q_value = OPENMS_IDFILTER_REFINED.out.idxml - versions = ch_software_versions + versions = ch_versions } From 5ce8c2222e9938579d6515fed2fe509390b57b91 Mon Sep 17 00:00:00 2001 From: Marissa 
Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Thu, 9 Dec 2021 13:54:54 +0100 Subject: [PATCH 224/227] Update mhcquant.nf --- workflows/mhcquant.nf | 59 +++++++++++++++++++++++-------------------- 1 file changed, 32 insertions(+), 27 deletions(-) diff --git a/workflows/mhcquant.nf b/workflows/mhcquant.nf index 8b6e140f..818c0389 100644 --- a/workflows/mhcquant.nf +++ b/workflows/mhcquant.nf @@ -69,16 +69,12 @@ def modules = params.modules.clone() def multiqc_options = modules['multiqc'] multiqc_options.args += params.multiqc_title ? Utils.joinModuleArgs(["--title \"$params.multiqc_title\""]) : '' -def openms_map_aligner_identification_options = modules['openms_map_aligner_identification'] -def openms_comet_adapter_options = modules['openms_comet_adapter'] def generate_proteins_from_vcf_options = modules['generate_proteins_from_vcf'] -def percolator_adapter_options = modules['percolator_adapter'] -def id_filter_options = modules['id_filter'] -def id_filter_for_alignment_options = id_filter_options.clone() -def id_filter_whitelist_options = modules['id_filter_whitelist'] +generate_proteins_from_vcf_options.args += params.variant_indel_filter ? Utils.joinModuleArgs(['-fINDEL']) : '' +generate_proteins_from_vcf_options.args += params.variant_frameshift_filter ? Utils.joinModuleArgs(['-fFS']) : '' +generate_proteins_from_vcf_options.args += params.variant_snp_filter ? Utils.joinModuleArgs(['-fSNP']) : '' -id_filter_options.args += " -score:pep " + params.fdr_threshold -id_filter_for_alignment_options.args += " -score:pep " + (params.fdr_threshold == '0.01') ? Utils.joinModuleArgs(['-score:pep 0.05']) : Utils.joinModuleArgs(['-score:pep ' + params.fdr_threshold]) +def openms_comet_adapter_options = modules['openms_comet_adapter'] openms_comet_adapter_options.args += params.use_x_ions ? Utils.joinModuleArgs(['-use_X_ions true']) : '' openms_comet_adapter_options.args += params.use_z_ions ? 
Utils.joinModuleArgs(['-use_Z_ions true']) : '' openms_comet_adapter_options.args += params.use_a_ions ? Utils.joinModuleArgs(['-use_A_ions true']) : '' @@ -86,18 +82,28 @@ openms_comet_adapter_options.args += params.use_c_ions ? Utils.joinModuleArgs([' openms_comet_adapter_options.args += params.use_NL_ions ? Utils.joinModuleArgs(['-use_NL_ions true']) : '' openms_comet_adapter_options.args += params.remove_precursor_peak ? Utils.joinModuleArgs(['-remove_precursor_peak yes']) : '' -generate_proteins_from_vcf_options.args += params.variant_indel_filter ? Utils.joinModuleArgs(['-fINDEL']) : '' -generate_proteins_from_vcf_options.args += params.variant_frameshift_filter ? Utils.joinModuleArgs(['-fFS']) : '' -generate_proteins_from_vcf_options.args += params.variant_snp_filter ? Utils.joinModuleArgs(['-fSNP']) : '' -percolator_adapter_options.args += (params.fdr_level != 'psm-level-fdrs') ? Utils.joinModuleArgs(['-'+params.fdr_level]) : '' +def openms_id_filter_options = modules['openms_id_filter'] +def openms_id_filter_refiner_options = modules['openms_id_filter_refiner'] +def openms_id_filter_for_alignment_options = openms_id_filter_options.clone() +def openms_id_filter_whitelist_options = modules['openms_id_filter_whitelist'] +openms_id_filter_options.args += " -score:pep " + params.fdr_threshold +openms_id_filter_refiner_options.args += " -score:pep " + params.fdr_threshold +openms_id_filter_for_alignment_options.args += " -score:pep " + (params.fdr_threshold == '0.01') ? 
Utils.joinModuleArgs(['-score:pep 0.05']) : Utils.joinModuleArgs(['-score:pep ' + params.fdr_threshold]) +def openms_id_filter_qvalue_options = openms_id_filter_options.clone() +openms_id_filter_qvalue_options.suffix = "filtered" -percolator_adapter_options.suffix = "all_ids_merged_psm_perc" - -def percolator_adapter_klammer_options = percolator_adapter_options.clone() -percolator_adapter_klammer_options.args += " -klammer" - -def id_filter_qvalue_options = id_filter_options.clone() -id_filter_qvalue_options.suffix = "filtered" +def openms_map_aligner_identification_options = modules['openms_map_aligner_identification'] +def openms_mztab_exporter_perc_options = modules['openms_mztab_exporter_perc'] +def openms_mztab_exporter_psm_options = modules['openms_mztab_exporter_psm'] +def openms_percolator_adapter_options = modules['openms_percolator_adapter'] +def openms_percolator_adapter_refine_options = modules['openms_percolator_adapter_refine'] +openms_percolator_adapter_options.args += (params.fdr_level != 'psm-level-fdrs') ? Utils.joinModuleArgs(['-'+params.fdr_level]) : '' +openms_percolator_adapter_refine_options.args += (params.fdr_level != 'psm-level-fdrs') ? 
Utils.joinModuleArgs(['-'+params.fdr_level]) : '' +def openms_percolator_adapter_klammer_options = openms_percolator_adapter_options.clone() +openms_percolator_adapter_klammer_options.args += " -klammer" + +def openms_rt_predict_peptides_options = modules['openms_rt_predict_peptides'] +def openms_rt_predict_neo_epitopes_options = modules['openms_rt_predict_neo_epitopes'] //////////////////////////////////////////////////// /* -- CREATE CHANNELS -- */ @@ -112,8 +118,8 @@ include { OPENMS_PEAKPICKERHIRES } from '../modules/loc include { OPENMS_COMETADAPTER } from '../modules/local/openms_cometadapter' addParams( options: openms_comet_adapter_options ) include { OPENMS_PEPTIDEINDEXER } from '../modules/local/openms_peptideindexer' addParams( options: [:] ) include { OPENMS_FALSEDISCOVERYRATE } from '../modules/local/openms_falsediscoveryrate' addParams( options: [:] ) -include { OPENMS_IDFILTER as OPENMS_IDFILTER_FOR_ALIGNMENT }from '../modules/local/openms_idfilter' addParams( options: id_filter_for_alignment_options ) -include { OPENMS_IDFILTER as OPENMS_IDFILTER_Q_VALUE } from '../modules/local/openms_idfilter' addParams( options: id_filter_qvalue_options ) +include { OPENMS_IDFILTER as OPENMS_IDFILTER_FOR_ALIGNMENT }from '../modules/local/openms_idfilter' addParams( options: openms_id_filter_for_alignment_options ) +include { OPENMS_IDFILTER as OPENMS_IDFILTER_Q_VALUE } from '../modules/local/openms_idfilter' addParams( options: openms_id_filter_qvalue_options ) include { OPENMS_MAPALIGNERIDENTIFICATION } from '../modules/local/openms_mapaligneridentification' addParams( options: openms_map_aligner_identification_options ) include { @@ -122,10 +128,10 @@ include { include { OPENMS_IDMERGER } from '../modules/local/openms_idmerger' addParams( options: [:] ) include { OPENMS_PSMFEATUREEXTRACTOR } from '../modules/local/openms_psmfeatureextractor' addParams( options: [:] ) -include { OPENMS_PERCOLATORADAPTER } from '../modules/local/openms_percolatoradapter' 
addParams( options: percolator_adapter_options ) -include { OPENMS_PERCOLATORADAPTER as OPENMS_PERCOLATORADAPTER_KLAMMER } from '../modules/local/openms_percolatoradapter' addParams( options: percolator_adapter_klammer_options ) +include { OPENMS_PERCOLATORADAPTER } from '../modules/local/openms_percolatoradapter' addParams( options: openms_percolator_adapter_options ) +include { OPENMS_PERCOLATORADAPTER as OPENMS_PERCOLATORADAPTER_KLAMMER } from '../modules/local/openms_percolatoradapter' addParams( options: openms_percolator_adapter_klammer_options ) -include { REFINE_FDR_ON_PREDICTED_SUBSET } from '../subworkflows/local/refine_fdr_on_predicted_subset' addParams( run_percolator_options : percolator_adapter_options, filter_options: id_filter_options, whitelist_filter_options: id_filter_whitelist_options) +include { REFINE_FDR_ON_PREDICTED_SUBSET } from '../subworkflows/local/refine_fdr_on_predicted_subset' addParams( exporter_prec_options : openms_mztab_exporter_perc_options, exporter_psm_options : openms_mztab_exporter_psm_options, run_percolator_options : openms_percolator_adapter_refine_options, whitelist_filter_options: openms_id_filter_whitelist_options, filter_options: openms_id_filter_refiner_options) include { OPENMS_FEATUREFINDERIDENTIFICATION } from '../modules/local/openms_featurefinderidentification' addParams( options: [:] ) include { OPENMS_FEATURELINKERUNLABELEDKD } from '../modules/local/openms_featurelinkerunlabeledkd' addParams( options: [:] ) @@ -147,8 +153,8 @@ include { MHCNUGGETS_PREDICTNEOEPITOPESCLASS2 } from '../modules/loc include { MHCNUGGETS_NEOEPITOPESCLASS2POST } from '../modules/local/mhcnuggets_neoepitopesclass2post' addParams( options: [:] ) include { OPENMS_RTMODEL } from '../modules/local/openms_rtmodel' addParams( options: [:] ) -include { OPENMS_RTPREDICT as OPENMS_RTPREDICT_FOUND_PEPTIDES} from '../modules/local/openms_rtpredict' addParams( options: [suffix:"_id_files_for_rt_prediction_RTpredicted"] ) -include { 
OPENMS_RTPREDICT as OPENMS_RTPREDICT_NEOEPITOPES} from '../modules/local/openms_rtpredict' addParams( options: [suffix:"_txt_file_for_rt_prediction_RTpredicted"] ) +include { OPENMS_RTPREDICT as OPENMS_RTPREDICT_FOUND_PEPTIDES} from '../modules/local/openms_rtpredict' addParams( options: openms_rt_predict_peptides_options ) +include { OPENMS_RTPREDICT as OPENMS_RTPREDICT_NEOEPITOPES} from '../modules/local/openms_rtpredict' addParams( options: openms_rt_predict_neo_epitopes_options ) include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/modules/custom/dumpsoftwareversions/main' addParams( options: [publish_files : ['_versions.yml':'']] ) include { MULTIQC } from '../modules/nf-core/modules/multiqc/main' addParams( options: multiqc_options ) @@ -251,7 +257,6 @@ workflow MHCQUANT { // Compute alignment rt transformatio OPENMS_MAPALIGNERIDENTIFICATION(ch_grouped_fdr_filtered) ch_versions = ch_versions.mix(OPENMS_MAPALIGNERIDENTIFICATION.out.versions.first().ifEmpty(null)) - // TODO: Why are there 5 versions printed? 
// Intermediate step to join RT transformation files with mzml and idxml channels ms_files.mzml .mix(OPENMS_THERMORAWFILEPARSER.out.mzml) From 49d7befb95d89b62cf9c6ac2426e48c8af6aad29 Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Thu, 9 Dec 2021 13:56:10 +0100 Subject: [PATCH 225/227] Update mhcflurry_predictpsms.nf --- modules/local/mhcflurry_predictpsms.nf | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/modules/local/mhcflurry_predictpsms.nf b/modules/local/mhcflurry_predictpsms.nf index 1cff8eb4..469b061b 100644 --- a/modules/local/mhcflurry_predictpsms.nf +++ b/modules/local/mhcflurry_predictpsms.nf @@ -12,11 +12,11 @@ process MHCFLURRY_PREDICTPSMS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'Intermediate_Results', publish_id:'Intermediate_Results') } - conda (params.enable_conda ? "bioconda::mhcflurry=1.4.3" : null) + conda (params.enable_conda ? 
"bioconda::fred2=2.0.6 bioconda::mhcflurry=1.4.3 bioconda::mhcnuggets=2.3.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mhcflurry:1.4.3--py_0" + container "https://depot.galaxyproject.org/singularity/mulled-v2-689ae0756dd82c61400782baaa8a7a1c2289930d:a9e10ca22d4cbcabf6b54f0fb5d766ea16bb171e-0" } else { - container "quay.io/biocontainers/mhcflurry:1.4.3--py_0" + container "quay.io/biocontainers/mulled-v2-689ae0756dd82c61400782baaa8a7a1c2289930d:a9e10ca22d4cbcabf6b54f0fb5d766ea16bb171e-0" } input: @@ -35,7 +35,9 @@ process MHCFLURRY_PREDICTPSMS { cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: + mhcnuggets: \$(echo \$(python -c "import pkg_resources; print('mhcnuggets' + pkg_resources.get_distribution('mhcnuggets').version)" | sed 's/^mhcnuggets//; s/ .*\$//' )) mhcflurry: \$(echo \$(mhcflurry-predict --version 2>&1 | sed 's/^mhcflurry //; s/ .*\$//') ) + fred2: \$(echo \$(python -c "import pkg_resources; print('fred2' + pkg_resources.get_distribution('Fred2').version)" | sed 's/^fred2//; s/ .*\$//')) END_VERSIONS """ From 4081a0dfca02f82e74651b0723ce469877081e1c Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar <71317334+marissaDubbelaar@users.noreply.github.com> Date: Thu, 9 Dec 2021 13:57:16 +0100 Subject: [PATCH 226/227] Update modules.config --- conf/modules.config | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/conf/modules.config b/conf/modules.config index bed18e01..5f31dc23 100644 --- a/conf/modules.config +++ b/conf/modules.config @@ -55,11 +55,11 @@ params { } 'openms_mztab_exporter_perc' { - suffix = "all_ids_merged_psm_perc_filtered" + suffix = "all_ids_merged_psm_perc_filtered" } 'openms_mztab_exporter_psm' { - suffix = "all_ids_merged" + suffix = "all_ids_merged" } 'openms_percolator_adapter' { From df4729f993cb932318cfccdd9aef27ec3b4bd0ab Mon Sep 17 00:00:00 2001 From: Marissa Dubbelaar 
<71317334+marissaDubbelaar@users.noreply.github.com> Date: Thu, 9 Dec 2021 14:29:58 +0100 Subject: [PATCH 227/227] Update CHANGELOG.md Change date to today --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e90f6ee1..c720e121 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # nf-core/mhcquant: Changelog -## v2.1.0 nf-core/mhcquant "Olive Tin Hamster" - 2021/12/08 +## v2.1.0 nf-core/mhcquant "Olive Tin Hamster" - 2021/12/09 ### `Added`
\n" + for (param in group_params.keySet()) { + summary_section += "
$param
${group_params.get(param) ?: 'N/A'}
\n" + } + summary_section += "