diff --git a/.github/.coveragerc b/.github/.coveragerc index 24a419ae07..cbdcccdac0 100644 --- a/.github/.coveragerc +++ b/.github/.coveragerc @@ -2,4 +2,3 @@ omit = nf_core/*-template/* source = nf_core relative_files = True - diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 71411be1b9..9dbd7a1f6b 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -6,7 +6,7 @@ These are the most common things requested on pull requests (PRs). Remember that PRs should be made against the dev branch, unless you're preparing a release. -Learn more about contributing: https://github.com/nf-core/tools/tree/master/.github/CONTRIBUTING.md +Learn more about contributing: https://github.com/nf-core/tools/tree/main/.github/CONTRIBUTING.md --> ## PR checklist diff --git a/.github/RELEASE_CHECKLIST.md b/.github/RELEASE_CHECKLIST.md index 9a1905c7a0..d64799382f 100644 --- a/.github/RELEASE_CHECKLIST.md +++ b/.github/RELEASE_CHECKLIST.md @@ -6,14 +6,13 @@ 4. Check that modules/subworkflows in template are up to date with the latest releases 5. Create a PR to `dev` to bump the version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:latest`. 6. Make sure all CI tests are passing! -7. Create a PR from `dev` to `master` -8. Make sure all CI tests are passing again (additional tests are run on PRs to `master`) +7. Create a PR from `dev` to `main` +8. Make sure all CI tests are passing again (additional tests are run on PRs to `main`) 9. Request review (2 approvals required) -10. Run `rich-codex` to regenerate docs screengrabs (actions `workflow_dispatch` button) -11. Merge the PR into `master` -12. Wait for CI tests on the commit to passed -13. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. -14. Create a new release copying the `CHANGELOG` for that release into the description section. +10. Merge the PR into `main` +11. Wait for CI tests on the commit to pass +12. (Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. +13. Create a new release copying the `CHANGELOG` for that release into the description section. ## After release @@ -21,3 +20,4 @@ 2. Check that the automatic `PyPi` deployment has worked: [pypi.org/project/nf-core](https://pypi.org/project/nf-core/) 3. Check `BioConda` has an automated PR to bump the version, and merge. eg. [bioconda/bioconda-recipes #20065](https://github.com/bioconda/bioconda-recipes/pull/20065) 4. Create a tools PR to `dev` to bump back to the next development version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:dev`. +5. 
Run `rich-codex` on the [tools/website repo](https://github.com/nf-core/website/actions/workflows/rich-codex.yml) to regenerate docs screengrabs (actions `workflow_dispatch` button) diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 8760901db1..3ef0760513 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -53,12 +53,24 @@ runs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + shell: bash + run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-lint-wf + # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX shell: bash run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-lint-wf + # Add empty ro-crate file + - name: add empty ro-crate file + shell: bash + run: touch nf-core-testpipeline/ro-crate-metadata.json + working-directory: create-lint-wf + # Run nf-core pipelines linting - name: nf-core pipelines lint shell: bash @@ -77,7 +89,7 @@ runs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: name: nf-core-log-file-${{ matrix.NXF_VER }} path: create-lint-wf/log.txt diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml index bbac1cc6ff..9775c70e10 100644 --- a/.github/workflows/branch.yml +++ b/.github/workflows/branch.yml @@ -1,15 +1,15 @@ name: nf-core branch protection -# This workflow is triggered on PRs to master branch on the repository -# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev` +# This workflow is triggered on PRs to main branch on the repository +# It fails when someone tries to make a PR against the nf-core `main` branch instead of `dev` on: pull_request_target: - branches: [master] + branches: [main] jobs: test: runs-on: ubuntu-latest steps: - # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches + # PRs to the nf-core repo main branch are only ok if coming from the nf-core repo `dev` or any `patch` branches - name: Check PRs if: github.repository == 'nf-core/tools' run: | @@ -21,7 +21,7 @@ jobs: uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 with: message: | - ## This PR is against the `master` branch :x: + ## This PR is against the `main` branch :x: * Do not close this PR * Click _Edit_ and change the `base` to `dev` @@ -31,9 +31,9 @@ jobs: Hi @${{ github.event.pull_request.user.login }}, - It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `master` branch. - The `master` branch on nf-core repositories should always contain code from the latest release. - Because of this, PRs to `master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. 
+ It looks like this pull-request has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `main` branch. + The `main` branch on nf-core repositories should always contain code from the latest release. + Because of this, PRs to `main` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. Note that even after this, the test will continue to show as failing until you push a new commit. diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index cebcc854bc..d4fe34c25a 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -19,7 +19,7 @@ jobs: ) steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} @@ -36,7 +36,7 @@ jobs: fi gh pr checkout $PR_NUMBER - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" @@ -65,7 +65,7 @@ jobs: echo "File changed: ${{ env.changed }}" - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" cache: "pip" diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index f07b31e9de..37ab71bc3b 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -38,7 +38,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.10.0" + - "24.04.2" - "latest-everything" steps: - name: go to subdirectory and change nextflow workdir shell: bash run: | mkdir -p create-lint-wf && cd create-lint-wf export NXF_WORK=$(pwd) # Get the repo code - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository # Set up nf-core/tools - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" cache: pip @@ -78,6 +78,11 @@ jobs: run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + run: find nf-core-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-lint-wf + # Run the other nf-core commands - name: nf-core pipelines list run: nf-core --log-file log.txt pipelines list diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 1fb521b4bb..fffa9ffe7a 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -40,7 +40,7 @@ jobs: - name: 🏗 Set up yq uses: frenck/action-setup-yq@v1 - name: checkout - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Create Matrix id: create_matrix run: 
| @@ -77,11 +77,11 @@ jobs: cd create-lint-wf-template export NXF_WORK=$(pwd) - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" @@ -137,16 +137,32 @@ jobs: run: find my-prefix-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; working-directory: create-test-lint-wf + # Uncomment includeConfig statement + - name: uncomment include config + run: find my-prefix-testpipeline -type f -exec sed -i 's/\/\/ includeConfig/includeConfig/' {} \; + working-directory: create-test-lint-wf + # Replace zenodo.XXXXXX to pass readme linting - name: replace zenodo.XXXXXX run: find my-prefix-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-test-lint-wf + # Add empty ro-crate file + - name: add empty ro-crate file + run: touch my-prefix-testpipeline/ro-crate-metadata.json + working-directory: create-test-lint-wf + # Run nf-core linting - name: nf-core pipelines lint run: nf-core --log-file log.txt --hide-progress pipelines lint --dir my-prefix-testpipeline --fail-warned working-directory: create-test-lint-wf + # Run code style linting + - name: run pre-commit + shell: bash + run: pre-commit run --all-files + working-directory: create-test-lint-wf + # Run bump-version - name: nf-core pipelines bump-version run: nf-core --log-file log.txt pipelines bump-version --dir my-prefix-testpipeline/ 1.1 @@ -163,7 +179,7 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: name: nf-core-log-file-${{ matrix.TEMPLATE }} path: create-test-lint-wf/artifact_files.tar diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 56c6c822a9..93581b9153 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -39,7 +39,7 @@ jobs: strategy: matrix: NXF_VER: - - "23.10.0" + - "24.04.2" - "latest-everything" steps: - name: go to working directory @@ -48,11 +48,11 @@ jobs: cd create-test-wf export NXF_WORK=$(pwd) - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" @@ -71,15 +71,11 @@ jobs: mkdir create-test-wf && cd create-test-wf export NXF_WORK=$(pwd) nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" - # echo current directory - pwd - # echo content of current directory - ls -la - nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results + nextflow run nf-core-testpipeline -profile self_hosted_runner,test --outdir ./results - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: name: nf-core-log-file-${{ 
matrix.NXF_VER }} path: create-test-wf/log.txt diff --git a/.github/workflows/deploy-pypi.yml b/.github/workflows/deploy-pypi.yml index 1202891e4d..d2b631f15f 100644 --- a/.github/workflows/deploy-pypi.yml +++ b/.github/workflows/deploy-pypi.yml @@ -13,11 +13,11 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml index 4334871c4c..6d749b0d98 100644 --- a/.github/workflows/fix-linting.yml +++ b/.github/workflows/fix-linting.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: token: ${{ secrets.nf_core_bot_auth_token }} @@ -32,7 +32,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} # Install and run pre-commit - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml index 3bddd42d49..7ffd6e9df9 100644 --- a/.github/workflows/lint-code.yml +++ b/.github/workflows/lint-code.yml @@ -18,10 +18,10 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" cache: "pip" diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index c613e13a2d..6409335ac7 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -23,7 +23,7 @@ jobs: fail-fast: false steps: - name: Check out code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Build nfcore/tools:dev docker image run: docker build --no-cache . -t nfcore/tools:dev diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml index 5a076f6d3b..d29b03b687 100644 --- a/.github/workflows/push_dockerhub_release.yml +++ b/.github/workflows/push_dockerhub_release.yml @@ -23,7 +23,7 @@ jobs: fail-fast: false steps: - name: Check out code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Build nfcore/tools:latest docker image run: docker build --no-cache . 
-t nfcore/tools:latest diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index dc88031886..76d5d710c0 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -49,7 +49,7 @@ jobs: steps: - name: Check conditions id: conditions - run: echo "run-tests=${{ github.ref == 'refs/heads/master' || (matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8') }}" >> "$GITHUB_OUTPUT" + run: echo "run-tests=${{ github.ref == 'refs/heads/main' || (matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8') }}" >> "$GITHUB_OUTPUT" outputs: python-version: ${{ matrix.python-version }} @@ -61,7 +61,7 @@ jobs: name: Get test file matrix runs-on: "ubuntu-latest" steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository - name: List tests @@ -87,11 +87,11 @@ jobs: cd pytest export NXF_WORK=$(pwd) - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out source-code repository - name: Set up Python ${{ needs.setup.outputs.python-version }} - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: ${{ needs.setup.outputs.python-version }} cache: "pip" @@ -132,7 +132,7 @@ jobs: - name: Test with pytest run: | - python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? + python3 -m pytest tests/${{matrix.test}} --color=yes --cov --cov-config=.coveragerc --durations=0 && exit_code=0|| exit_code=$? # don't fail if no tests were collected, e.g. for test_licence.py if [ "${exit_code}" -eq 5 ]; then echo "No tests were collected" @@ -142,21 +142,23 @@ jobs: exit 1 fi - - name: Store snapshot report - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 - if: always() - with: - name: Snapshot Report ${{ matrix.test }} - path: ./snapshot_report.html - - name: remove slashes from test name run: | test=$(echo ${{ matrix.test }} | sed 's/\//__/g') echo "test=${test}" >> $GITHUB_ENV + - name: Store snapshot report + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 + if: always() + with: + include-hidden-files: true + name: Snapshot Report ${{ env.test }} + path: ./snapshot_report.html + - name: Upload coverage - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: + include-hidden-files: true name: coverage_${{ env.test }} path: .coverage @@ -170,9 +172,9 @@ jobs: mkdir -p pytest cd pytest - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 env: AGENT_TOOLSDIRECTORY: /opt/actions-runner/_work/tools/tools/ with: @@ -189,14 +191,14 @@ jobs: mv .github/.coveragerc . 
- name: Download all artifacts - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4 - name: Run coverage run: | coverage combine --keep coverage*/.coverage* coverage report coverage xml - - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be # v4 + - uses: codecov/codecov-action@5c47607acb93fed5485fdbf7232e8a31425f672a # v5 with: files: coverage.xml env: diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml deleted file mode 100644 index cd12b139d3..0000000000 --- a/.github/workflows/rich-codex.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Generate images for docs -on: - workflow_dispatch: -jobs: - rich_codex: - runs-on: ubuntu-latest - steps: - - name: Check out the repo - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 - - name: Set up Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 - with: - python-version: 3.x - cache: pip - cache-dependency-path: setup.py - - - name: Install Nextflow - uses: nf-core/setup-nextflow@v2 - - - name: Install nf-test - uses: nf-core/setup-nf-test@v1 - - - name: Install nf-core/tools - run: pip install git+https://github.com/nf-core/tools.git@dev - - - name: Generate terminal images with rich-codex - uses: ewels/rich-codex@8ce988cc253c240a3027ba58e33e47640935dd8b # v1 - env: - COLUMNS: 100 - HIDE_PROGRESS: "true" - with: - commit_changes: "true" - clean_img_paths: docs/images/*.svg - terminal_width: 100 - before_command: > - which nextflow && - which nf-core && - nextflow -version && - nf-core --version diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 55880e8130..625f00d247 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -1,7 +1,7 @@ name: Sync template on: - release: - types: [published] + # release: + # types: [published] workflow_dispatch: inputs: testpipeline: @@ -19,6 +19,10 @@ on: description: "Force a PR to be created" type: boolean default: false + pipeline: + description: "Pipeline to sync" + type: string + default: "all" # Cancel if a newer run is started concurrency: @@ -35,6 +39,14 @@ jobs: run: | if [ "${{ github.event.inputs.testpipeline }}" == "true" ]; then echo '{"pipeline":["testpipeline"]}' > pipeline_names.json + elif [ "${{ github.event.inputs.pipeline }}" != "all" ] && [ "${{ github.event.inputs.pipeline }}" != "" ]; then + curl -O https://nf-co.re/pipeline_names.json + # check if the pipeline exists + if ! 
grep -q "\"${{ github.event.inputs.pipeline }}\"" pipeline_names.json; then + echo "Pipeline ${{ github.event.inputs.pipeline }} does not exist" + exit 1 + fi + echo '{"pipeline":["${{ github.event.inputs.pipeline }}"]}' > pipeline_names.json else curl -O https://nf-co.re/pipeline_names.json fi @@ -48,10 +60,10 @@ jobs: matrix: ${{fromJson(needs.get-pipelines.outputs.matrix)}} fail-fast: false steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out nf-core/tools - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 name: Check out nf-core/${{ matrix.pipeline }} with: repository: nf-core/${{ matrix.pipeline }} @@ -61,7 +73,7 @@ jobs: fetch-depth: "0" - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" @@ -82,7 +94,7 @@ jobs: run: | git config --global user.email "core@nf-co.re" git config --global user.name "nf-core-bot" - nf-core --log-file sync_log_${{ matrix.pipeline }}.txt sync -d nf-core/${{ matrix.pipeline }} \ + nf-core --log-file sync_log_${{ matrix.pipeline }}.txt pipelines sync -d nf-core/${{ matrix.pipeline }} \ --from-branch dev \ --pull-request \ --username nf-core-bot \ @@ -90,7 +102,7 @@ jobs: - name: Upload sync log file artifact if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: name: sync_log_${{ matrix.pipeline }} path: sync_log_${{ matrix.pipeline }}.txt diff --git a/.github/workflows/update-textual-snapshots.yml b/.github/workflows/update-textual-snapshots.yml index fb936762f8..9706ef55c0 100644 --- a/.github/workflows/update-textual-snapshots.yml +++ b/.github/workflows/update-textual-snapshots.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: token: ${{ secrets.nf_core_bot_auth_token }} @@ -33,7 +33,7 @@ jobs: # Install dependencies and run pytest - name: Set up Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" cache: "pip" @@ -46,7 +46,7 @@ jobs: - name: Run pytest to update snapshots id: pytest run: | - python3 -m pytest tests/test_create_app.py --snapshot-update --color=yes --durations=0 + python3 -m pytest tests/pipelines/test_create_app.py --snapshot-update --color=yes --durations=0 continue-on-error: true # indication that the run has finished diff --git a/.github/workflows/update_components_template.yml b/.github/workflows/update_components_template.yml index e2ecebfcb4..575065d383 100644 --- a/.github/workflows/update_components_template.yml +++ b/.github/workflows/update_components_template.yml @@ -11,10 +11,10 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up Python - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: 
actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.x" diff --git a/.gitignore b/.gitignore index a3721da86e..7fe467abc9 100644 --- a/.gitignore +++ b/.gitignore @@ -113,7 +113,6 @@ ENV/ # Jetbrains IDEs .idea pip-wheel-metadata -.vscode .*.sw? # Textual diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4f08d8419d..1494f58182 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.0 + rev: v0.8.1 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix @@ -13,13 +13,13 @@ repos: - prettier@3.3.3 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python - rev: "2.7.3" + rev: "3.0.3" hooks: - id: editorconfig-checker alias: ec - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.11.1" + rev: "v1.13.0" hooks: - id: mypy additional_dependencies: diff --git a/CHANGELOG.md b/CHANGELOG.md index 1bcc937852..da5f72c357 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,18 +1,103 @@ # nf-core/tools: Changelog -## v3.0.0dev +## v3.0.3dev + +### Template + +- Keep pipeline name in version.yml file ([#3223](https://github.com/nf-core/tools/pull/3223)) +- Fix Manifest DOI text ([#3224](https://github.com/nf-core/tools/pull/3224)) +- Do not assume pipeline name is url ([#3225](https://github.com/nf-core/tools/pull/3225)) +- fix workflow_dispatch trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) +- Add resource limits to Gitpod profile ([#3255](https://github.com/nf-core/tools/pull/3255)) +- Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) +- Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) +- Move `includeConfig 'conf/modules.config'` next to `includeConfig 'conf/base.config'` to not overwrite test profile configurations ([#3301](https://github.com/nf-core/tools/pull/3301)) +- Use `params.monochrome_logs` in the template and update nf-core components ([#3310](https://github.com/nf-core/tools/pull/3310)) +- Fix some typos and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) +- Add `manifest.contributors` to `nextflow.config` ([#3311](https://github.com/nf-core/tools/pull/3311)) + +### Download + +- First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity https:// Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). +- Further steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Enable limited support for `oras://` container paths (_only absolute URIs, no flexible registries like with Docker_) and prevent unnecessary image downloads for Seqera Container modules with `reconcile_seqera_container_uris()` ([#3293](https://github.com/nf-core/tools/pull/3293)). 
+- Update dawidd6/action-download-artifact action to v7 ([#3306](https://github.com/nf-core/tools/pull/3306)) + +### Linting + +- allow mixed `str` and `dict` entries in lint config ([#3228](https://github.com/nf-core/tools/pull/3228)) + +### Modules + +- add a panel around diff previews when updating ([#3246](https://github.com/nf-core/tools/pull/3246)) + +### Subworkflows + +- Add `nf-core subworkflows patch` command ([#2861](https://github.com/nf-core/tools/pull/2861)) + +### General + +- Include .nf-core.yml in `nf-core pipelines bump-version` ([#3220](https://github.com/nf-core/tools/pull/3220)) +- create: add shortcut to toggle all switches ([#3226](https://github.com/nf-core/tools/pull/3226)) +- Remove unrelated values when saving `.nf-core` file ([#3227](https://github.com/nf-core/tools/pull/3227)) +- chore(deps): update pre-commit hook pre-commit/mirrors-mypy to v1.12.0 ([#3230](https://github.com/nf-core/tools/pull/3230)) +- chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.7.0 ([#3229](https://github.com/nf-core/tools/pull/3229)) +- Update python:3.12-slim Docker digest to 032c526 ([#3232](https://github.com/nf-core/tools/pull/3232)) +- use correct `--profile` options for `nf-core subworkflows test` ([#3233](https://github.com/nf-core/tools/pull/3233)) +- Update GitHub Actions ([#3237](https://github.com/nf-core/tools/pull/3237)) +- add `--dir/-d` option to schema commands ([#3247](https://github.com/nf-core/tools/pull/3247)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.1 ([#3250](https://github.com/nf-core/tools/pull/3250)) +- handle new schema structure in `nf-core pipelines create-params-file` ([#3276](https://github.com/nf-core/tools/pull/3276)) +- Update Gitpod image to use Miniforge instead of Miniconda ([#3274](https://github.com/nf-core/tools/pull/3274)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.3 ([#3275](https://github.com/nf-core/tools/pull/3275)) +- Add hint to solve git errors with a synced repo ([#3279](https://github.com/nf-core/tools/pull/3279)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.4 ([#3282](https://github.com/nf-core/tools/pull/3282)) +- Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) +- Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) +- Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) +- Run pre-commit when testing linting the template pipeline ([#3280](https://github.com/nf-core/tools/pull/3280)) + +## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] + +### Template + +- Add null/ to .gitignore ([#3191](https://github.com/nf-core/tools/pull/3191)) +- Parallelize pipeline GHA tests over docker/conda/singularity ([#3214](https://github.com/nf-core/tools/pull/3214)) +- Fix `template_version_comment.yml` github action ([#3212](https://github.com/nf-core/tools/pull/3212)) +- Fix pre-commit linting on pipeline template ([#3218](https://github.com/nf-core/tools/pull/3218)) + +### Linting + +- Fix bug when linting schema params and when using `defaultIgnoreParams` ([#3213](https://github.com/nf-core/tools/pull/3213)) + +### General + +- Use updated pipeline commands in docstrings ([#3215](https://github.com/nf-core/tools/pull/3215)) +- Disable automatic sync on release, 
fix handling empty pipeline input ([#3217](https://github.com/nf-core/tools/pull/3217)) + +## [v3.0.1 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.1) - [2024-10-09] + +### Template + +- Fixed an issue where the linting CI action didn't read the correct file ([#3202](https://github.com/nf-core/tools/pull/3202)) +- Fixed condition for `awsfulltest` to run ([#3203](https://github.com/nf-core/tools/pull/3203)) +- Fix too many empty lines added by jinja ([#3204](https://github.com/nf-core/tools/pull/3204) and [#3206](https://github.com/nf-core/tools/pull/3206)) +- Fix header blocks in local subworkflow including git merge marker-like strings ([#3201](https://github.com/nf-core/tools/pull/3201)) +- Update included subworkflows and modules ([#3208](https://github.com/nf-core/tools/pull/3208)) + +## [v3.0.0 - Titanium Tapir](https://github.com/nf-core/tools/releases/tag/3.0.0) - [2024-10-08] **Highlights** -- Pipeline commands are renamed from `nf-core <command>` to `nf-core pipelines <command>` to follow the same command structure as modules and subworkflows commands. +- Pipeline commands are renamed from `nf-core <command>` to `nf-core pipelines <command>` to follow the same command structure as modules and subworkflows commands. - More customisation for pipeline templates. The template has been divided into features which can be skipped, e.g. you can create a new pipeline without any traces of FastQC in it. - A new Text User Interface app when running `nf-core pipelines create` to help us guide you through the process better (no worries, you can still use the cli if you give all values as parameters) - We replaced nf-validation with nf-schema in the pipeline template - CI tests now lint with the nf-core tools version matching the template version of the pipeline, to minimise errors in opened PRs with every new tools release. -- New command `nf-core pipelines ro-crate` to create a [Research Object (RO) crate](https://www.researchobject.org/ro-crate/) for a pipeline - `nf-core licences` command is deprecated. -- The structure of nf-core/tools pytests has been updated -- The structure of the API docs has been updated +- Changed default branch to `main`. +- The structure of nf-core/tools pytests has been updated. +- The structure of the API docs has been updated. 
### Template @@ -23,21 +108,27 @@ - Remove release announcement for non nf-core pipelines ([#3072](https://github.com/nf-core/tools/pull/3072)) - handle template features with a yaml file ([#3108](https://github.com/nf-core/tools/pull/3108), [#3112](https://github.com/nf-core/tools/pull/3112)) - add option to exclude code linters for custom pipeline template ([#3084](https://github.com/nf-core/tools/pull/3084)) -- add option to exclude citations for custom pipeline template ([#3101](https://github.com/nf-core/tools/pull/3101)) +- add option to exclude citations for custom pipeline template ([#3101](https://github.com/nf-core/tools/pull/3101) and [#3169](https://github.com/nf-core/tools/pull/3169)) - add option to exclude gitpod for custom pipeline template ([#3100](https://github.com/nf-core/tools/pull/3100)) - add option to exclude codespaces from pipeline template ([#3105](https://github.com/nf-core/tools/pull/3105)) - add option to exclude multiqc from pipeline template ([#3103](https://github.com/nf-core/tools/pull/3103)) - add option to exclude changelog from custom pipeline template ([#3104](https://github.com/nf-core/tools/pull/3104)) - add option to exclude license from pipeline template ([#3125](https://github.com/nf-core/tools/pull/3125)) - add option to exclude email from pipeline template ([#3126](https://github.com/nf-core/tools/pull/3126)) -- Use nf-schema instead of nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) - add option to exclude nf-schema from the template ([#3116](https://github.com/nf-core/tools/pull/3116)) - add option to exclude fastqc from pipeline template ([#3129](https://github.com/nf-core/tools/pull/3129)) - add option to exclude documentation from pipeline template ([#3130](https://github.com/nf-core/tools/pull/3130)) - add option to exclude test configs from pipeline template ([#3133](https://github.com/nf-core/tools/pull/3133)) - add option to exclude tower.yml from pipeline template ([#3134](https://github.com/nf-core/tools/pull/3134)) +- Use nf-schema instead of nf-validation ([#3116](https://github.com/nf-core/tools/pull/3116)) +- test pipeline with conda and singularity on PRs to master ([#3149](https://github.com/nf-core/tools/pull/3149)) - run nf-core lint `--release` on PRs to master ([#3148](https://github.com/nf-core/tools/pull/3148)) - Add tests to ensure all files are part of a template customisation group and all groups are tested ([#3099](https://github.com/nf-core/tools/pull/3099)) +- Update the syntax of `utils_nfcore_pipeline_pipeline` local subworkflow ([#3166](https://github.com/nf-core/tools/pull/3166)) +- Remove if/else block to include `igenomes.config` ([#3168](https://github.com/nf-core/tools/pull/3168)) +- Fixed release announcement hashtags for Mastodon ([#3176](https://github.com/nf-core/tools/pull/3176)) +- Remove try/catch blocks from `nextflow.config` ([#3167](https://github.com/nf-core/tools/pull/3167)) +- Extend `download_pipeline.yml` to count pre-downloaded container images. 
([#3182](https://github.com/nf-core/tools/pull/3182)) ### Linting @@ -55,22 +146,35 @@ ### Pipeline create command -- Create: allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) -- Create: Mock git cretentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) -- Create app: display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) -- Pipelines: allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) +- Allow more special characters on the pipeline name for non-nf-core pipelines ([#3008](https://github.com/nf-core/tools/pull/3008)) +- Mock git credentials to generate stable textual snapshots ([#3007](https://github.com/nf-core/tools/pull/3007)) +- Display input textbox with equally spaced grid ([#3038](https://github.com/nf-core/tools/pull/3038)) +- Allow numbers in custom pipeline name ([#3094](https://github.com/nf-core/tools/pull/3094)) ### Components - The `modules_nfcore` tag in the `main.nf.test` file of modules/subworkflows now displays the organization name in custom modules repositories ([#3005](https://github.com/nf-core/tools/pull/3005)) - Add `--migrate_pytest` option to `nf-core test` command ([#3085](https://github.com/nf-core/tools/pull/3085)) -- Components: allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) +- Allow spaces at the beginning of include statements ([#3115](https://github.com/nf-core/tools/pull/3115)) +- Add option `--fix` to update the `meta.yml` file of subworkflows ([#3077](https://github.com/nf-core/tools/pull/3077)) + +### Download + +- Fully removed the already deprecated `-t` / `--tower` flag. +- Refactored the CLI for consistency (short flag is usually second word, e.g. 
also for `--container-library` etc.): + +| Old parameter | New parameter | +| --------------------------------- | --------------------------------- | +| `-d` / `--download-configuration` | `-c` / `--download-configuration` | +| `-p` / `--parallel-downloads` | `-d` / `--parallel-downloads` | +| new parameter | `-p` / (`--platform`) | ### General +- Change default branch to `main` for the nf-core/tools repository - Update output of generation script for API docs to new structure ([#2988](https://github.com/nf-core/tools/pull/2988)) +- Remove `rich-codex.yml` action, images are now generated on the website repo ([#2989](https://github.com/nf-core/tools/pull/2989)) - Add no clobber and put bash options on their own line ([#2991](https://github.com/nf-core/tools/pull/2991)) -- update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) - move pipeline subcommands for v3.0 ([#2983](https://github.com/nf-core/tools/pull/2983)) - return directory if base_dir is the root directory ([#3003](https://github.com/nf-core/tools/pull/3003)) - Remove nf-core licences command ([#3012](https://github.com/nf-core/tools/pull/3012)) @@ -94,6 +198,7 @@ - Update python:3.12-slim Docker digest to 59c7332 ([#3124](https://github.com/nf-core/tools/pull/3124)) - Update pre-commit hook pre-commit/mirrors-mypy to v1.11.1 ([#3091](https://github.com/nf-core/tools/pull/3091)) - Update to pytest v8 and move it to dev dependencies ([#3058](https://github.com/nf-core/tools/pull/3058)) +- Update minimal textual version and snapshots ([#2998](https://github.com/nf-core/tools/pull/2998)) ## [v2.14.1 - Tantalum Toad - Patch](https://github.com/nf-core/tools/releases/tag/2.14.1) - [2024-05-09] @@ -137,7 +242,7 @@ ### Download -- Replace `--tower` with `--platform`. The former will remain for backwards compatability for now but will be removed in a future release. ([#2853](https://github.com/nf-core/tools/pull/2853)) +- Replace `--tower` with `--platform`. The former will remain for backwards compatibility for now but will be removed in a future release. ([#2853](https://github.com/nf-core/tools/pull/2853)) - Better error message when GITHUB_TOKEN exists but is wrong/outdated - New `--tag` argument to add custom tags during a pipeline download ([#2938](https://github.com/nf-core/tools/pull/2938)) @@ -446,7 +551,7 @@ - Refactored the CLI parameters related to container images. Although downloading other images than those of the Singularity/Apptainer container system is not supported for the time being, a generic name for the parameters seemed preferable. So the new parameter `--singularity-cache-index` introduced in [#2247](https://github.com/nf-core/tools/pull/2247) has been renamed to `--container-cache-index` prior to release ([#2336](https://github.com/nf-core/tools/pull/2336)). - To address issue [#2311](https://github.com/nf-core/tools/issues/2311), a new parameter `--container-library` was created allowing to specify the container library (registry) from which container images in OCI format (Docker) should be pulled ([#2336](https://github.com/nf-core/tools/pull/2336)). - Container detection in configs was improved. This allows for DSL2-like container definitions inside the container parameter value provided to process scopes [#2346](https://github.com/nf-core/tools/pull/2346). -- Add apptainer to the list of false positve container strings ([#2353](https://github.com/nf-core/tools/pull/2353)). 
+- Add apptainer to the list of false positive container strings ([#2353](https://github.com/nf-core/tools/pull/2353)). #### Updated CLI parameters @@ -480,7 +585,7 @@ _In addition, `-r` / `--revision` has been changed to a parameter that can be pr - GitPod base image: Always self-update to the latest version of Nextflow. Add [pre-commit](https://pre-commit.com/) dependency. - GitPod configs: Update Nextflow as an init task, init pre-commit in pipeline config. - Refgenie: Create `nxf_home/nf-core/refgenie_genomes.config` path if it doesn't exist ([#2312](https://github.com/nf-core/tools/pull/2312)) -- Add CI tests to test running a pipeline whe it's created from a template skipping different areas +- Add CI tests to test running a pipeline when it's created from a template skipping different areas ## [v2.8 - Ruthenium Monkey](https://github.com/nf-core/tools/releases/tag/2.8) - [2023-04-27] @@ -516,7 +621,7 @@ _In addition, `-r` / `--revision` has been changed to a parameter that can be pr - Add an `--empty-template` option to create a module without TODO statements or examples ([#2175](https://github.com/nf-core/tools/pull/2175) & [#2177](https://github.com/nf-core/tools/pull/2177)) - Removed the `nf-core modules mulled` command and all its code dependencies ([2199](https://github.com/nf-core/tools/pull/2199)). -- Take into accout the provided `--git_remote` URL when linting all modules ([2243](https://github.com/nf-core/tools/pull/2243)). +- Take into account the provided `--git_remote` URL when linting all modules ([2243](https://github.com/nf-core/tools/pull/2243)). ### Subworkflows @@ -925,7 +1030,7 @@ Please note that there are many excellent integrations for Prettier available, f - `input:` / `output:` not being specified in module - Allow for containers from other biocontainers resource as defined [here](https://github.com/nf-core/modules/blob/cde237e7cec07798e5754b72aeca44efe89fc6db/modules/cat/fastq/main.nf#L7-L8) - Fixed traceback when using `stageAs` syntax as defined [here](https://github.com/nf-core/modules/blob/cde237e7cec07798e5754b72aeca44efe89fc6db/modules/cat/fastq/main.nf#L11) -- Added `nf-core schema docs` command to output pipline parameter documentation in Markdown format for inclusion in GitHub and other documentation systems ([#741](https://github.com/nf-core/tools/issues/741)) +- Added `nf-core schema docs` command to output pipeline parameter documentation in Markdown format for inclusion in GitHub and other documentation systems ([#741](https://github.com/nf-core/tools/issues/741)) - Allow conditional process execution from the configuration file ([#1393](https://github.com/nf-core/tools/pull/1393)) - Add linting for when condition([#1397](https://github.com/nf-core/tools/pull/1397)) - Added modules ignored table to `nf-core modules bump-versions`. ([#1234](https://github.com/nf-core/tools/issues/1234)) @@ -944,7 +1049,7 @@ Please note that there are many excellent integrations for Prettier available, f - Update repo logos to utilize [GitHub's `#gh-light/dark-mode-only`](https://docs.github.com/en/github/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#specifying-the-theme-an-image-is-shown-to), to switch between logos optimized for light or dark themes. The old repo logos have to be removed (in `docs/images` and `assets/`). 
- Deal with authentication with private repositories -- Bump minimun Nextflow version to 21.10.3 +- Bump minimum Nextflow version to 21.10.3 - Convert pipeline template to updated Nextflow DSL2 syntax - Solve circular import when importing `nf_core.modules.lint` - Disable cache in `nf_core.utils.fetch_wf_config` while performing `test_wf_use_local_configs`. @@ -966,15 +1071,15 @@ Please note that there are many excellent integrations for Prettier available, f - Defaults in `nextflow.config` must now match the variable _type_ specified in the schema - If you want the parameter to not have a default value, use `null` - Strings set to `false` or an empty string in `nextflow.config` will now fail linting -- Bump minimun Nextflow version to 21.10.3 -- Changed `questionary` `ask()` to `unsafe_ask()` to not catch `KeyboardInterupts` ([#1237](https://github.com/nf-core/tools/issues/1237)) +- Bump minimum Nextflow version to 21.10.3 +- Changed `questionary` `ask()` to `unsafe_ask()` to not catch `KeyboardInterrupts` ([#1237](https://github.com/nf-core/tools/issues/1237)) - Fixed bug in `nf-core launch` due to revisions specified with `-r` not being added to nextflow command. ([#1246](https://github.com/nf-core/tools/issues/1246)) - Update regex in `readme` test of `nf-core lint` to agree with the pipeline template ([#1260](https://github.com/nf-core/tools/issues/1260)) - Update 'fix' message in `nf-core lint` to conform to the current command line options. ([#1259](https://github.com/nf-core/tools/issues/1259)) - Fixed bug in `nf-core list` when `NXF_HOME` is set - Run CI test used to create and lint/run the pipeline template with minimum and latest edge release of NF ([#1304](https://github.com/nf-core/tools/issues/1304)) - New YAML issue templates for tools bug reports and feature requests, with a much richer interface ([#1165](https://github.com/nf-core/tools/pull/1165)) -- Handle synax errors in Nextflow config nicely when running `nf-core schema build` ([#1267](https://github.com/nf-core/tools/pull/1267)) +- Handle syntax errors in Nextflow config nicely when running `nf-core schema build` ([#1267](https://github.com/nf-core/tools/pull/1267)) - Erase temporary files and folders while performing Python tests (pytest) - Remove base `Dockerfile` used for DSL1 pipeline container builds - Run tests with Python 3.10 @@ -1080,7 +1185,7 @@ This marks the first Nextflow DSL2-centric release of `tools` which means that s - Updated `nf-core modules install` and `modules.json` to work with new directory structure ([#1159](https://github.com/nf-core/tools/issues/1159)) - Updated `nf-core modules remove` to work with new directory structure [[#1159](https://github.com/nf-core/tools/issues/1159)] - Restructured code and removed old table style in `nf-core modules list` -- Fixed bug causing `modules.json` creation to loop indefinitly +- Fixed bug causing `modules.json` creation to loop indefinitely - Added `--all` flag to `nf-core modules install` - Added `remote` and `local` subcommands to `nf-core modules list` - Fix bug due to restructuring in modules template @@ -1161,7 +1266,7 @@ This marks the first Nextflow DSL2-centric release of `tools` which means that s ## [v1.13.2 - Copper Crocodile CPR :crocodile: :face_with_head_bandage:](https://github.com/nf-core/tools/releases/tag/1.13.2) - [2021-03-23] - Make module template pass the EC linter [[#953](https://github.com/nf-core/tools/pull/953)] -- Added better logging message if a user doesn't specificy the directory correctly with `nf-core modules` commands 
[[#942](https://github.com/nf-core/tools/pull/942)] - Fixed parameter validation bug caused by JSONObject [[#937](https://github.com/nf-core/tools/issues/937)] - Fixed template creation error regarding file permissions [[#932](https://github.com/nf-core/tools/issues/932)] - Split the `create-lint-wf` tests up into separate steps in GitHub Actions to make the CI results easier to read @@ -1401,7 +1506,7 @@ making a pull-request. See [`.github/CONTRIBUTING.md`](.github/CONTRIBUTING.md) ### Linting - Refactored PR branch tests to be a little clearer. -- Linting error docs explain how to add an additional branch protecton rule to the `branch.yml` GitHub Actions workflow. +- Linting error docs explain how to add an additional branch protection rule to the `branch.yml` GitHub Actions workflow. - Adapted linting docs to the new PR branch tests. - Failure for missing the readme bioconda badge is now a warn, in case this badge is not relevant - Added test for template `{{ cookiecutter.var }}` placeholders diff --git a/CITATION.cff b/CITATION.cff index 017666c018..d1246b69d7 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -24,7 +24,7 @@ version: 2.4.1 doi: 10.1038/s41587-020-0439-x date-released: 2022-05-16 url: https://github.com/nf-core/tools -prefered-citation: +preferred-citation: type: article authors: - family-names: Ewels diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f9773296c1..ce36354331 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -153,7 +153,7 @@ Optionally followed by the description that you want to add to the changelog. - Update Textual snapshots: -If the Textual snapshots (run by `tests/test_crate_app.py`) fail, an HTML report is generated and uploaded as an artifact. +If the Textual snapshots (run by `tests/pipelines/test_create_app.py`) fail, an HTML report is generated and uploaded as an artifact. If you are sure that these changes are correct, you can automatically update the snapshots form the PR by posting a comment with the magic words: ``` diff --git a/Dockerfile b/Dockerfile index fb1a867937..dc9948ea4b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:59c7332a4a24373861c4a5f0eec2c92b87e3efeb8ddef011744ef9a751b1d11c +FROM python:3.12-slim@sha256:2a6386ad2db20e7f55073f69a98d6da2cf9f168e05e7487d2670baeb9b7601c5 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" diff --git a/README.md b/README.md index 58fb708a0d..8a3e7d05e6 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@

[nf-core/tools logo image]

-[![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=master&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amaster) -[![codecov](https://codecov.io/gh/nf-core/tools/branch/master/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) +[![Python tests](https://github.com/nf-core/tools/workflows/Python%20tests/badge.svg?branch=main&event=push)](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amain) +[![codecov](https://codecov.io/gh/nf-core/tools/branch/main/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) [![code style: prettier](https://img.shields.io/badge/code%20style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) [![code style: Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v1.json)](https://github.com/charliermarsh/ruff) @@ -21,7 +21,7 @@ For documentation of the internal Python functions, please refer to the [Tools P ## Installation -For full installation instructions, please see the [nf-core documentation](https://nf-co.re/docs/usage/tools). +For full installation instructions, please see the [nf-core documentation](https://nf-co.re/docs/nf-core-tools/installation). Below is a quick-start for those who know what they're doing: ### Bioconda diff --git a/docs/api/_src/api/index.md b/docs/api/_src/api/index.md index 035a896888..f25e166a90 100644 --- a/docs/api/_src/api/index.md +++ b/docs/api/_src/api/index.md @@ -8,4 +8,4 @@ This API documentation is for the [`nf-core/tools`](https://github.com/nf-core/t - [Module commands](./module_lint_tests/) (run by `nf-core modules lint`) - [Subworkflow commands](./subworkflow_lint_tests/) (run by `nf-core subworkflows lint`) - [nf-core/tools Python package API reference](./api/) - - [nf-core/tools pipeline commands API referece](./api/pipelines/) + - [nf-core/tools pipeline commands API reference](./api/pipelines/) diff --git a/docs/api/_src/api/utils.md b/docs/api/_src/api/utils.md new file mode 100644 index 0000000000..1353f97ef5 --- /dev/null +++ b/docs/api/_src/api/utils.md @@ -0,0 +1,9 @@ +# nf_core.utils + +```{eval-rst} +.. automodule:: nf_core.utils + :members: + :undoc-members: + :show-inheritance: + :private-members: +``` diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py index bfdbd7888d..5a45483d9c 100644 --- a/docs/api/_src/conf.py +++ b/docs/api/_src/conf.py @@ -40,7 +40,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ["myst_parser", "sphinx.ext.autodoc", "sphinx.ext.napoleon"] +extensions = ["myst_parser", "sphinx.ext.autodoc", "sphinx.ext.napoleon", "sphinxcontrib.autodoc_pydantic"] # Add any paths that contain templates here, relative to this directory. templates_path = ["./_templates"] @@ -51,8 +51,8 @@ # source_suffix = ['.rst', '.md'] source_suffix = ".rst" -# The master toctree document. -master_doc = "index" +# The main toctree document. +main_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -133,7 +133,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ - (master_doc, "nf-core.tex", "nf-core tools API documentation", "Phil Ewels, Sven Fillinger", "manual"), + (main_doc, "nf-core.tex", "nf-core tools API documentation", "Phil Ewels, Sven Fillinger", "manual"), ] @@ -141,7 +141,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, "nf-core", "nf-core tools API documentation", [author], 1)] +man_pages = [(main_doc, "nf-core", "nf-core tools API documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- @@ -151,7 +151,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + main_doc, "nf-core", "nf-core tools API documentation", author, diff --git a/docs/api/_src/pipeline_lint_tests/included_configs.md b/docs/api/_src/pipeline_lint_tests/included_configs.md new file mode 100644 index 0000000000..f68f7da25e --- /dev/null +++ b/docs/api/_src/pipeline_lint_tests/included_configs.md @@ -0,0 +1,5 @@ +# included_configs + + ```{eval-rst} + .. automethod:: nf_core.pipelines.lint.PipelineLint.included_configs + ``` diff --git a/docs/api/_src/pipeline_lint_tests/index.md b/docs/api/_src/pipeline_lint_tests/index.md index 3575c08db4..4dd93442d2 100644 --- a/docs/api/_src/pipeline_lint_tests/index.md +++ b/docs/api/_src/pipeline_lint_tests/index.md @@ -7,6 +7,7 @@ - [base_config](./base_config/) - [files_exist](./files_exist/) - [files_unchanged](./files_unchanged/) + - [included_configs](./included_configs/) - [merge_markers](./merge_markers/) - [modules_config](./modules_config/) - [modules_json](./modules_json/) @@ -16,6 +17,7 @@ - [nfcore_yml](./nfcore_yml/) - [pipeline_name_conventions](./pipeline_name_conventions/) - [pipeline_todos](./pipeline_todos/) + - [plugin_includes](./plugin_includes/) - [readme](./readme/) - [schema_description](./schema_description/) - [schema_lint](./schema_lint/) diff --git a/docs/api/requirements.txt b/docs/api/requirements.txt index abffe30740..1d23f0b27d 100644 --- a/docs/api/requirements.txt +++ b/docs/api/requirements.txt @@ -1,3 +1,4 @@ +autodoc_pydantic Sphinx>=3.3.1 sphinxcontrib-napoleon sphinx-markdown-builder diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 0efea13ec9..9f16188e95 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -4,6 +4,7 @@ import logging import os import sys +from pathlib import Path import rich import rich.console @@ -35,6 +36,7 @@ pipelines_launch, pipelines_lint, pipelines_list, + pipelines_rocrate, pipelines_schema_build, pipelines_schema_docs, pipelines_schema_lint, @@ -85,7 +87,7 @@ }, { "name": "For developers", - "commands": ["create", "lint", "bump-version", "sync", "schema", "create-logo"], + "commands": ["create", "lint", "bump-version", "sync", "schema", "rocrate", "create-logo"], }, ], "nf-core modules": [ @@ -366,26 +368,18 @@ def command_pipelines_lint( help="Archive compression type", ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") -# TODO: Remove this in a future release. Deprecated in March 2024. -@click.option( - "-t", - "--tower", - is_flag=True, - default=False, - hidden=True, - help="Download for Seqera Platform. 
DEPRECATED: Please use `--platform` instead.",
-)
 @click.option(
+    "-p",
     "--platform",
     is_flag=True,
     default=False,
     help="Download for Seqera Platform (formerly Nextflow Tower)",
 )
 @click.option(
-    "-d",
+    "-c",
     "--download-configuration",
-    is_flag=True,
-    default=False,
+    type=click.Choice(["yes", "no"]),
+    default="no",
     help="Include configuration profiles in download. Not available with `--platform`",
 )
 @click.option(
@@ -420,7 +414,7 @@ def command_pipelines_lint(
     help="List of images already available in a remote `singularity.cacheDir`.",
 )
 @click.option(
-    "-p",
+    "-d",
     "--parallel-downloads",
     type=int,
     default=4,
@@ -434,7 +428,6 @@ def command_pipelines_download(
     outdir,
     compress,
     force,
-    tower,
     platform,
     download_configuration,
     tag,
@@ -454,7 +447,6 @@ def command_pipelines_download(
     outdir,
     compress,
     force,
-    tower,
     platform,
     download_configuration,
     tag,
@@ -579,6 +571,44 @@ def command_pipelines_list(ctx, keywords, sort, json, show_archived):
     pipelines_list(ctx, keywords, sort, json, show_archived)


+# nf-core pipelines rocrate
+@pipelines.command("rocrate")
+@click.argument(
+    "pipeline_dir",
+    type=click.Path(exists=True),
+    default=Path.cwd(),
+    required=True,
+    metavar="",
+)
+@click.option(
+    "-j",
+    "--json_path",
+    default=Path.cwd(),
+    type=str,
+    help="Path to save RO Crate metadata json file to",
+)
+@click.option("-z", "--zip_path", type=str, help="Path to save RO Crate zip file to")
+@click.option(
+    "-pv",
+    "--pipeline_version",
+    type=str,
+    help="Version of pipeline to use for RO Crate",
+    default="",
+)
+@click.pass_context
+def rocrate(
+    ctx,
+    pipeline_dir: str,
+    json_path: str,
+    zip_path: str,
+    pipeline_version: str,
+):
+    """
+    Make a Research Object Crate
+    """
+    pipelines_rocrate(ctx, pipeline_dir, json_path, zip_path, pipeline_version)
+
+
 # nf-core pipelines sync
 @pipelines.command("sync")
 @click.pass_context
@@ -706,12 +736,24 @@ def pipeline_schema():

 # nf-core pipelines schema validate
 @pipeline_schema.command("validate")
+@click.option(
+    "-d",
+    "--dir",
+    "directory",
+    type=click.Path(exists=True),
+    default=".",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
+)
 @click.argument("pipeline", required=True, metavar="")
 @click.argument("params", type=click.Path(exists=True), required=True, metavar="")
-def command_pipelines_schema_validate(pipeline, params):
+def command_pipelines_schema_validate(directory, pipeline, params):
     """
     Validate a set of parameters against a pipeline schema.
     """
+    if Path(directory, pipeline).exists():
+        # this is a local pipeline
+        pipeline = Path(directory, pipeline)
+
     pipelines_schema_validate(pipeline, params)


@@ -750,23 +792,39 @@ def command_pipelines_schema_build(directory, no_prompts, web_only, url):

 # nf-core pipelines schema lint
 @pipeline_schema.command("lint")
+@click.option(
+    "-d",
+    "--dir",
+    "directory",
+    type=click.Path(exists=True),
+    default=".",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
+)
 @click.argument(
-    "schema_path",
+    "schema_file",
     type=click.Path(exists=True),
     default="nextflow_schema.json",
     metavar="",
 )
-def command_pipelines_schema_lint(schema_path):
+def command_pipelines_schema_lint(directory, schema_file):
     """
     Check that a given pipeline schema is valid.
     """
-    pipelines_schema_lint(schema_path)
+    pipelines_schema_lint(Path(directory, schema_file))


 # nf-core pipelines schema docs
 @pipeline_schema.command("docs")
+@click.option(
+    "-d",
+    "--dir",
+    "directory",
+    type=click.Path(exists=True),
+    default=".",
+    help=r"Pipeline directory. [dim]\[default: current working directory][/]",
+)
 @click.argument(
-    "schema_path",
+    "schema_file",
     type=click.Path(exists=True),
     default="nextflow_schema.json",
     required=False,
     metavar="",
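The `rocrate` subcommand added earlier in this file's diff is thin CLI plumbing over `nf_core.pipelines.rocrate`. A minimal sketch of the equivalent Python call, mirroring `pipelines_rocrate()` in `commands_pipelines.py` later in this diff; the pipeline path, version, and output locations below are hypothetical:

```python
# Sketch of what `nf-core pipelines rocrate` drives under the hood
# (mirrors pipelines_rocrate() in commands_pipelines.py; paths are hypothetical).
from pathlib import Path

from nf_core.pipelines.rocrate import ROCrate

pipeline_dir = Path("my-pipeline")  # hypothetical local pipeline checkout
rocrate_obj = ROCrate(pipeline_dir, "1.0.0")  # pipeline_version as passed by the CLI
rocrate_obj.create_rocrate(
    json_path=pipeline_dir / "ro-crate-metadata.json",
    zip_path=pipeline_dir / "ro-crate.zip",
)
```

At least one of `json_path` or `zip_path` must be given; the CLI wrapper exits with an error when both are omitted.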
""" - pipelines_schema_lint(schema_path) + pipelines_schema_lint(Path(directory, schema_file)) # nf-core pipelines schema docs @pipeline_schema.command("docs") +@click.option( + "-d", + "--dir", + "directory", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) @click.argument( - "schema_path", + "schema_file", type=click.Path(exists=True), default="nextflow_schema.json", required=False, @@ -795,11 +853,11 @@ def command_pipelines_schema_lint(schema_path): help="CSV list of columns to include in the parameter tables (parameter,description,type,default,required,hidden)", default="parameter,description,type,default,required,hidden", ) -def command_pipelines_schema_docs(schema_path, output, format, force, columns): +def command_pipelines_schema_docs(directory, schema_file, output, format, force, columns): """ Outputs parameter documentation for a pipeline schema. """ - pipelines_schema_docs(schema_path, output, format, force, columns) + pipelines_schema_docs(Path(directory, schema_file), output, format, force, columns) # nf-core modules subcommands @@ -1005,7 +1063,7 @@ def command_modules_update( default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -@click.option("-r", "--remove", is_flag=True, default=False) +@click.option("-r", "--remove", is_flag=True, default=False, help="Remove an existent patch file and regenerate it.") def command_modules_patch(ctx, tool, directory, remove): """ Create a patch file for minor changes in a module @@ -1231,11 +1289,14 @@ def command_modules_test(ctx, tool, directory, no_prompts, update, once, profile is_flag=True, help="Fix the module version if a newer version is available", ) -def command_modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +@click.option("--fix", is_flag=True, help="Fix all linting tests if possible.") +def command_modules_lint( + ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix +): """ Lint one or more modules in a directory. """ - modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version) + modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix) # nf-core modules info @@ -1378,7 +1439,7 @@ def command_subworkflows_create(ctx, subworkflow, directory, author, force, migr ) @click.option( "--profile", - type=click.Choice(["none", "singularity"]), + type=click.Choice(["docker", "singularity", "conda"]), default=None, help="Run tests with a specific profile", ) @@ -1476,11 +1537,14 @@ def command_subworkflows_list_local(ctx, keywords, json, directory): # pylint: help="Sort lint output by subworkflow or test name.", show_default=True, ) -def command_subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by): +@click.option("--fix", is_flag=True, help="Fix all linting tests if possible.") +def command_subworkflows_lint( + ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix +): """ Lint one or more subworkflows in a directory. 
""" - subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by) + subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix) # nf-core subworkflows info @@ -1542,6 +1606,43 @@ def command_subworkflows_install(ctx, subworkflow, directory, prompt, force, sha subworkflows_install(ctx, subworkflow, directory, prompt, force, sha) +# nf-core subworkflows patch +@subworkflows.command("patch") +@click.pass_context +@click.argument("tool", type=str, required=False, metavar=" or ") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option("-r", "--remove", is_flag=True, default=False, help="Remove an existent patch file and regenerate it.") +def subworkflows_patch(ctx, tool, dir, remove): + """ + Create a patch file for minor changes in a subworkflow + + Checks if a subworkflow has been modified locally and creates a patch file + describing how the module has changed from the remote version + """ + from nf_core.subworkflows import SubworkflowPatch + + try: + subworkflow_patch = SubworkflowPatch( + dir, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + if remove: + subworkflow_patch.remove(tool) + else: + subworkflow_patch.patch(tool) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + # nf-core subworkflows remove @subworkflows.command("remove") @click.pass_context @@ -1594,7 +1695,7 @@ def command_subworkflows_remove(ctx, directory, subworkflow): "limit_output", is_flag=True, default=False, - help="Limit ouput to only the difference in main.nf", + help="Limit output to only the difference in main.nf", ) @click.option( "-a", @@ -1696,7 +1797,7 @@ def command_schema_validate(pipeline, params): @click.option( "--url", type=str, - default="https://nf-co.re/pipeline_schema_builder", + default="https://oldsite.nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) def command_schema_build(directory, no_prompts, web_only, url): @@ -1812,13 +1913,14 @@ def command_create_logo(logo_text, directory, name, theme, width, format, force) Use `nf-core pipelines create-logo` instead. """ log.warning( - "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipelines screate-logo[/]` instead." + "The `[magenta]nf-core create-logo[/]` command is deprecated. Use `[magenta]nf-core pipeliness create-logo[/]` instead." ) pipelines_create_logo(logo_text, directory, name, theme, width, format, force) # nf-core sync (deprecated) @nf_core_cli.command("sync", hidden=True, deprecated=True) +@click.pass_context @click.option( "-d", "--dir", @@ -1849,14 +1951,14 @@ def command_create_logo(logo_text, directory, name, theme, width, format, force) @click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.") @click.option("-u", "--username", type=str, help="GitHub PR: auth username.") @click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template") -def command_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): +def command_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ Use `nf-core pipelines sync` instead. """ log.warning( "The `[magenta]nf-core sync[/]` command is deprecated. 
Use `[magenta]nf-core pipelines sync[/]` instead." ) - pipelines_sync(directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) + pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr) # nf-core bump-version (deprecated) @@ -2114,8 +2216,7 @@ def command_download( outdir, compress, force, - tower, - platform, + platform or tower, download_configuration, tag, container_system, diff --git a/nf_core/commands_modules.py b/nf_core/commands_modules.py index 57c8e9777c..33b1f75160 100644 --- a/nf_core/commands_modules.py +++ b/nf_core/commands_modules.py @@ -261,7 +261,7 @@ def modules_test(ctx, tool, directory, no_prompts, update, once, profile, migrat sys.exit(1) -def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version): +def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, passed, sort_by, fix_version, fix): """ Lint one or more modules in a directory. @@ -278,6 +278,7 @@ def modules_lint(ctx, tool, directory, registry, key, all, fail_warned, local, p module_lint = ModuleLint( directory, fail_warned=fail_warned, + fix=fix, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 23affb1d27..3b28f4979c 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -2,6 +2,7 @@ import os import sys from pathlib import Path +from typing import Optional, Union import rich @@ -167,7 +168,6 @@ def pipelines_download( outdir, compress, force, - tower, platform, download_configuration, tag, @@ -185,16 +185,13 @@ def pipelines_download( """ from nf_core.pipelines.download import DownloadWorkflow - if tower: - log.warning("[red]The `-t` / `--tower` flag is deprecated. 
Please use `--platform` instead.[/]") - dl = DownloadWorkflow( pipeline, revision, outdir, compress, force, - tower or platform, # True if either specified + platform, download_configuration, tag, container_system, @@ -281,6 +278,33 @@ def pipelines_list(ctx, keywords, sort, json, show_archived): stdout.print(list_workflows(keywords, sort, json, show_archived)) +# nf-core pipelines rocrate +def pipelines_rocrate( + ctx, + pipeline_dir: Union[str, Path], + json_path: Optional[Union[str, Path]], + zip_path: Optional[Union[str, Path]], + pipeline_version: str, +) -> None: + from nf_core.pipelines.rocrate import ROCrate + + if json_path is None and zip_path is None: + log.error("Either `--json_path` or `--zip_path` must be specified.") + sys.exit(1) + else: + pipeline_dir = Path(pipeline_dir) + if json_path is not None: + json_path = Path(json_path) + if zip_path is not None: + zip_path = Path(zip_path) + try: + rocrate_obj = ROCrate(pipeline_dir, pipeline_version) + rocrate_obj.create_rocrate(json_path=json_path, zip_path=zip_path) + except (UserWarning, LookupError, FileNotFoundError) as e: + log.error(e) + sys.exit(1) + + # nf-core pipelines sync def pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ diff --git a/nf_core/commands_subworkflows.py b/nf_core/commands_subworkflows.py index a32f8d5c3e..8e90a8116b 100644 --- a/nf_core/commands_subworkflows.py +++ b/nf_core/commands_subworkflows.py @@ -104,7 +104,7 @@ def subworkflows_list_local(ctx, keywords, json, directory): # pylint: disable= sys.exit(1) -def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by): +def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warned, local, passed, sort_by, fix): """ Lint one or more subworkflows in a directory. @@ -121,6 +121,7 @@ def subworkflows_lint(ctx, subworkflow, directory, registry, key, all, fail_warn subworkflow_lint = SubworkflowLint( directory, fail_warned=fail_warned, + fix=fix, registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], diff --git a/nf_core/modules/modules_differ.py b/nf_core/components/components_differ.py similarity index 76% rename from nf_core/modules/modules_differ.py rename to nf_core/components/components_differ.py index b6d7f0d0fa..db51c1910d 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/components/components_differ.py @@ -6,7 +6,9 @@ from pathlib import Path from typing import Dict, List, Union -from rich.console import Console +from rich import box +from rich.console import Console, Group, RenderableType +from rich.panel import Panel from rich.syntax import Syntax import nf_core.utils @@ -14,10 +16,10 @@ log = logging.getLogger(__name__) -class ModulesDiffer: +class ComponentsDiffer: """ Static class that provides functionality for computing diffs between - different instances of a module + different instances of a module or subworkflow """ class DiffEnum(enum.Enum): @@ -32,15 +34,15 @@ class DiffEnum(enum.Enum): REMOVED = enum.auto() @staticmethod - def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_dir=None): + def get_component_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_dir=None): """ - Compute the diff between the current module version + Compute the diff between the current component version and the new version. 
Args: - from_dir (strOrPath): The folder containing the old module files - to_dir (strOrPath): The folder containing the new module files - path_in_diff (strOrPath): The directory displayed containing the module + from_dir (strOrPath): The folder containing the old component files + to_dir (strOrPath): The folder containing the new component files + path_in_diff (strOrPath): The directory displayed containing the component file in the diff. Added so that temporary dirs are not shown for_git (bool): indicates whether the diff file is to be @@ -50,7 +52,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d dsp_to_dir (str | Path): The to directory to display in the diff Returns: - dict[str, (ModulesDiffer.DiffEnum, str)]: A dictionary containing + dict[str, (ComponentsDiffer.DiffEnum, str)]: A dictionary containing the diff type and the diff string (empty if no diff) """ if for_git: @@ -70,7 +72,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d ) files = list(files) - # Loop through all the module files and compute their diffs if needed + # Loop through all the component files and compute their diffs if needed for file in files: temp_path = Path(to_dir, file) curr_path = Path(from_dir, file) @@ -82,7 +84,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d if new_lines == old_lines: # The files are identical - diffs[file] = (ModulesDiffer.DiffEnum.UNCHANGED, ()) + diffs[file] = (ComponentsDiffer.DiffEnum.UNCHANGED, ()) else: # Compute the diff diff = difflib.unified_diff( @@ -91,7 +93,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path(dsp_from_dir, file)), tofile=str(Path(dsp_to_dir, file)), ) - diffs[file] = (ModulesDiffer.DiffEnum.CHANGED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.CHANGED, diff) elif temp_path.exists(): with open(temp_path) as fh: @@ -104,7 +106,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path("/dev", "null")), tofile=str(Path(dsp_to_dir, file)), ) - diffs[file] = (ModulesDiffer.DiffEnum.CREATED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.CREATED, diff) elif curr_path.exists(): # The file was removed @@ -117,14 +119,14 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path(dsp_from_dir, file)), tofile=str(Path("/dev", "null")), ) - diffs[file] = (ModulesDiffer.DiffEnum.REMOVED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.REMOVED, diff) return diffs @staticmethod def write_diff_file( diff_path, - module, + component, repo_path, from_dir, to_dir, @@ -137,20 +139,19 @@ def write_diff_file( limit_output=False, ): """ - Writes the diffs of a module to the diff file. + Writes the diffs of a component to the diff file. 
Args: diff_path (str | Path): The path to the file that should be appended - module (str): The module name - repo_path (str): The name of the repo where the module resides - from_dir (str | Path): The directory containing the old module files - to_dir (str | Path): The directory containing the new module files - diffs (dict[str, (ModulesDiffer.DiffEnum, str)]): A dictionary containing + component (str): The component name + repo_path (str): The name of the repo where the component resides + from_dir (str | Path): The directory containing the old component files + to_dir (str | Path): The directory containing the new component files + diffs (dict[str, (ComponentsDiffer.DiffEnum, str)]): A dictionary containing the type of change and the diff (if any) - module_dir (str | Path): The path to the current installation of the module - current_version (str): The installed version of the module - new_version (str): The version of the module the diff is computed against + current_version (str): The installed version of the component + new_version (str): The version of the component the diff is computed against for_git (bool): indicates whether the diff file is to be compatible with `git apply`. If true it adds a/ and b/ prefixes to the file paths @@ -163,36 +164,36 @@ def write_diff_file( if dsp_to_dir is None: dsp_to_dir = to_dir - diffs = ModulesDiffer.get_module_diffs(from_dir, to_dir, for_git, dsp_from_dir, dsp_to_dir) - if all(diff_status == ModulesDiffer.DiffEnum.UNCHANGED for _, (diff_status, _) in diffs.items()): - raise UserWarning("Module is unchanged") - log.debug(f"Writing diff of '{module}' to '{diff_path}'") + diffs = ComponentsDiffer.get_component_diffs(from_dir, to_dir, for_git, dsp_from_dir, dsp_to_dir) + if all(diff_status == ComponentsDiffer.DiffEnum.UNCHANGED for _, (diff_status, _) in diffs.items()): + raise UserWarning("Component is unchanged") + log.debug(f"Writing diff of '{component}' to '{diff_path}'") with open(diff_path, file_action) as fh: if current_version is not None and new_version is not None: fh.write( - f"Changes in module '{Path(repo_path, module)}' between" + f"Changes in component '{Path(repo_path, component)}' between" f" ({current_version}) and" f" ({new_version})\n" ) else: - fh.write(f"Changes in module '{Path(repo_path, module)}'\n") + fh.write(f"Changes in component '{Path(repo_path, component)}'\n") for file, (diff_status, diff) in diffs.items(): - if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + if diff_status == ComponentsDiffer.DiffEnum.UNCHANGED: # The files are identical fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: # The file was created between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was created\n") - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was removed\n") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - fh.write(f"Changes in '{Path(module, file)}' but not shown\n") + fh.write(f"Changes in '{Path(component, file)}' but not shown\n") else: # The file has changed write the diff lines to the file - fh.write(f"Changes in '{Path(module, file)}':\n") + fh.write(f"Changes in '{Path(component, file)}':\n") for line in diff: fh.write(line) fh.write("\n") @@ -235,7 +236,7 @@ def append_modules_json_diff(diff_path, 
old_modules_json, new_modules_json, modu @staticmethod def print_diff( - module, + component, repo_path, from_dir, to_dir, @@ -246,16 +247,15 @@ def print_diff( limit_output=False, ): """ - Prints the diffs between two module versions to the terminal + Prints the diffs between two component versions to the terminal Args: - module (str): The module name - repo_path (str): The name of the repo where the module resides - from_dir (str | Path): The directory containing the old module files - to_dir (str | Path): The directory containing the new module files - module_dir (str): The path to the current installation of the module - current_version (str): The installed version of the module - new_version (str): The version of the module the diff is computed against + component (str): The component name + repo_path (str): The name of the repo where the component resides + from_dir (str | Path): The directory containing the old component files + to_dir (str | Path): The directory containing the new component files + current_version (str): The installed version of the component + new_version (str): The version of the component the diff is computed against dsp_from_dir (str | Path): The 'from' directory displayed in the diff dsp_to_dir (str | Path): The 'to' directory displayed in the diff limit_output (bool): If true, don't print the diff for files other than main.nf @@ -265,35 +265,49 @@ def print_diff( if dsp_to_dir is None: dsp_to_dir = to_dir - diffs = ModulesDiffer.get_module_diffs( + diffs = ComponentsDiffer.get_component_diffs( from_dir, to_dir, for_git=False, dsp_from_dir=dsp_from_dir, dsp_to_dir=dsp_to_dir ) console = Console(force_terminal=nf_core.utils.rich_force_colors()) if current_version is not None and new_version is not None: log.info( - f"Changes in module '{Path(repo_path, module)}' between" f" ({current_version}) and" f" ({new_version})" + f"Changes in component '{Path(repo_path, component)}' between" + f" ({current_version}) and" + f" ({new_version})" ) else: - log.info(f"Changes in module '{Path(repo_path, module)}'") + log.info(f"Changes in component '{Path(repo_path, component)}'") + panel_group: list[RenderableType] = [] for file, (diff_status, diff) in diffs.items(): - if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + if diff_status == ComponentsDiffer.DiffEnum.UNCHANGED: # The files are identical log.info(f"'{Path(dsp_from_dir, file)}' is unchanged") - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: # The file was created between the commits log.info(f"'{Path(dsp_from_dir, file)}' was created") - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - log.info(f"Changes in '{Path(module, file)}' but not shown") + log.info(f"Changes in '{Path(component, file)}' but not shown") else: # The file has changed - log.info(f"Changes in '{Path(module, file)}':") + log.info(f"Changes in '{Path(component, file)}':") # Pretty print the diff using the pygments diff lexer - console.print(Syntax("".join(diff), "diff", theme="ansi_dark", padding=1)) + syntax = Syntax("".join(diff), "diff", theme="ansi_dark", line_numbers=True) + panel_group.append(Panel(syntax, title=str(file), title_align="left", padding=0)) + console.print( + Panel( + Group(*panel_group), + 
title=f"[white]{str(component)}[/white]", + title_align="left", + padding=0, + border_style="blue", + box=box.HEAVY, + ) + ) @staticmethod def per_file_patch(patch_fn: Union[str, Path]) -> Dict[str, List[str]]: @@ -391,8 +405,8 @@ def get_new_and_old_lines(patch): def try_apply_single_patch(file_lines, patch, reverse=False): """ Tries to apply a patch to a modified file. Since the line numbers in - the patch does not agree if the file is modified, the old and new - lines inpatch are reconstructed and then we look for the old lines + the patch do not agree if the file is modified, the old and new + lines in the patch are reconstructed and then we look for the old lines in the modified file. If all hunk in the patch are found in the new file it is updated with the new lines from the patch file. @@ -408,7 +422,7 @@ def try_apply_single_patch(file_lines, patch, reverse=False): LookupError: If it fails to find the old lines from the patch in the file. """ - org_lines, patch_lines = ModulesDiffer.get_new_and_old_lines(patch) + org_lines, patch_lines = ComponentsDiffer.get_new_and_old_lines(patch) if reverse: patch_lines, org_lines = org_lines, patch_lines @@ -452,16 +466,22 @@ def try_apply_single_patch(file_lines, patch, reverse=False): @staticmethod def try_apply_patch( - module: str, repo_path: Union[str, Path], patch_path: Union[str, Path], module_dir: Path, reverse: bool = False + component_type: str, + component: str, + repo_path: Union[str, Path], + patch_path: Union[str, Path], + component_dir: Path, + reverse: bool = False, ) -> Dict[str, List[str]]: """ - Try applying a full patch file to a module + Try applying a full patch file to a module or subworkflow Args: - module (str): Name of the module - repo_path (str): Name of the repository where the module resides + component_type (str): The type of component (modules or subworkflows) + component (str): Name of the module or subworkflow + repo_path (str): Name of the repository where the component resides patch_path (str): The absolute path to the patch file to be applied - module_dir (Path): The directory containing the module + component_dir (Path): The directory containing the component reverse (bool): Apply the patch in reverse Returns: @@ -471,19 +491,19 @@ def try_apply_patch( Raises: LookupError: If the patch application fails in a file """ - module_relpath = Path("modules", repo_path, module) - patches = ModulesDiffer.per_file_patch(patch_path) + component_relpath = Path(component_type, repo_path, component) + patches = ComponentsDiffer.per_file_patch(patch_path) new_files = {} for file, patch in patches.items(): log.debug(f"Applying patch to {file}") - fn = Path(file).relative_to(module_relpath) - file_path = module_dir / fn + fn = Path(file).relative_to(component_relpath) + file_path = component_dir / fn try: with open(file_path) as fh: file_lines = fh.readlines() except FileNotFoundError: # The file was added with the patch file_lines = [""] - patched_new_lines = ModulesDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) + patched_new_lines = ComponentsDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) new_files[str(fn)] = patched_new_lines return new_files diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py index 8a00e758cb..3acacb4fe4 100644 --- a/nf_core/components/components_utils.py +++ b/nf_core/components/components_utils.py @@ -4,6 +4,7 @@ from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union import questionary +import requests import 
rich.prompt import yaml @@ -199,3 +200,29 @@ def get_components_to_install( current_comp_dict[component_name].update(component_dict) return list(modules.values()), list(subworkflows.values()) + + +def get_biotools_id(tool_name) -> str: + """ + Try to find a bio.tools ID for 'tool' + """ + url = f"https://bio.tools/api/t/?q={tool_name}&format=json" + try: + # Send a GET request to the API + response = requests.get(url) + response.raise_for_status() # Raise an error for bad status codes + # Parse the JSON response + data = response.json() + + # Iterate through the tools in the response to find the tool name + for tool in data["list"]: + if tool["name"].lower() == tool_name: + return tool["biotoolsCURIE"] + + # If the tool name was not found in the response + log.warning(f"Could not find a bio.tools ID for '{tool_name}'") + return "" + + except requests.exceptions.RequestException as e: + log.warning(f"Could not find a bio.tools ID for '{tool_name}': {e}") + return "" diff --git a/nf_core/components/create.py b/nf_core/components/create.py index c71b128415..c781905618 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -21,6 +21,7 @@ import nf_core import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_utils import get_biotools_id from nf_core.pipelines.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) @@ -61,6 +62,7 @@ def __init__( self.file_paths: Dict[str, Path] = {} self.not_empty_template = not empty_template self.migrate_pytest = migrate_pytest + self.tool_identifier = "" def create(self) -> bool: """ @@ -149,6 +151,8 @@ def create(self) -> bool: if self.component_type == "modules": # Try to find a bioconda package for 'component' self._get_bioconda_tool() + # Try to find a biotools entry for 'component' + self.tool_identifier = get_biotools_id(self.component) # Prompt for GitHub username self._get_username() @@ -244,7 +248,7 @@ def _get_module_structure_components(self): if self.process_label is None: log.info( "Provide an appropriate resource label for the process, taken from the " - "[link=https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" + "[link=https://github.com/nf-core/tools/blob/main/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" "For example: {}".format(", ".join(process_label_defaults)) ) while self.process_label is None: diff --git a/nf_core/components/info.py b/nf_core/components/info.py index 98f8be5272..31769785a1 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -211,9 +211,9 @@ def get_local_yaml(self) -> Optional[Dict]: return yaml.safe_load(fh) log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") - return None + return {} - def get_remote_yaml(self) -> Optional[dict]: + def get_remote_yaml(self) -> Optional[Dict]: """Attempt to get the meta.yml file from a remote repo. 
Returns: @@ -229,6 +229,25 @@ def get_remote_yaml(self) -> Optional[dict]: self.remote_location = self.modules_repo.remote_url return yaml.safe_load(file_contents) + def generate_params_table(self, type) -> Table: + "Generate a rich table for inputs and outputs" + table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) + table.add_column(f":inbox_tray: {type}") + table.add_column("Description") + if self.component_type == "modules": + table.add_column("Pattern", justify="right", style="green") + elif self.component_type == "subworkflows": + table.add_column("Structure", justify="right", style="green") + return table + + def get_channel_structure(self, structure: dict) -> str: + "Get the structure of a channel" + structure_str = "" + for key, info in structure.items(): + pattern = f" - {info['pattern']}" if info.get("pattern") else "" + structure_str += f"{key} ({info['type']}{pattern})" + return structure_str + def generate_component_info_help(self): """Take the parsed meta.yml and generate rich help. @@ -277,33 +296,48 @@ def generate_component_info_help(self): # Inputs if self.meta.get("input"): - inputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - inputs_table.add_column(":inbox_tray: Inputs") - inputs_table.add_column("Description") - inputs_table.add_column("Pattern", justify="right", style="green") - for input in self.meta["input"]: - for key, info in input.items(): - inputs_table.add_row( - f"[orange1 on black] {key} [/][dim i] ({info['type']})", - Markdown(info["description"] if info["description"] else ""), - info.get("pattern", ""), - ) + inputs_table = self.generate_params_table("Inputs") + for i, input in enumerate(self.meta["input"]): + inputs_table.add_row(f"[italic]input[{i}][/]", "", "") + if self.component_type == "modules": + for element in input: + for key, info in element.items(): + inputs_table.add_row( + f"[orange1 on black] {key} [/][dim i] ({info['type']})", + Markdown(info["description"] if info["description"] else ""), + info.get("pattern", ""), + ) + elif self.component_type == "subworkflows": + for key, info in input.items(): + inputs_table.add_row( + f"[orange1 on black] {key} [/][dim i]", + Markdown(info["description"] if info["description"] else ""), + self.get_channel_structure(info["structure"]) if info.get("structure") else "", + ) renderables.append(inputs_table) # Outputs if self.meta.get("output"): - outputs_table = Table(expand=True, show_lines=True, box=box.MINIMAL_HEAVY_HEAD, padding=0) - outputs_table.add_column(":outbox_tray: Outputs") - outputs_table.add_column("Description") - outputs_table.add_column("Pattern", justify="right", style="green") + outputs_table = self.generate_params_table("Outputs") for output in self.meta["output"]: - for key, info in output.items(): - outputs_table.add_row( - f"[orange1 on black] {key} [/][dim i] ({info['type']})", - Markdown(info["description"] if info["description"] else ""), - info.get("pattern", ""), - ) + if self.component_type == "modules": + for ch_name, elements in output.items(): + outputs_table.add_row(f"{ch_name}", "", "") + for element in elements: + for key, info in element.items(): + outputs_table.add_row( + f"[orange1 on black] {key} [/][dim i] ({info['type']})", + Markdown(info["description"] if info["description"] else ""), + info.get("pattern", ""), + ) + elif self.component_type == "subworkflows": + for key, info in output.items(): + outputs_table.add_row( + f"[orange1 on black] {key} [/][dim i]", + Markdown(info["description"] 
if info["description"] else ""), + self.get_channel_structure(info["structure"]) if info.get("structure") else "", + ) renderables.append(outputs_table) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index c1b1f24cb7..69740135a8 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -22,7 +22,7 @@ from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_json import ModulesJson from nf_core.pipelines.lint_utils import console -from nf_core.utils import LintConfigType +from nf_core.utils import NFCoreYamlLintConfig from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) @@ -57,6 +57,7 @@ def __init__( component_type: str, directory: Union[str, Path], fail_warned: bool = False, + fix: bool = False, remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -73,12 +74,13 @@ def __init__( ) self.fail_warned = fail_warned + self.fix = fix self.passed: List[LintResult] = [] self.warned: List[LintResult] = [] self.failed: List[LintResult] = [] self.all_local_components: List[NFCoreComponent] = [] - self.lint_config: Optional[LintConfigType] = None + self.lint_config: Optional[NFCoreYamlLintConfig] = None self.modules_json: Optional[ModulesJson] = None if self.component_type == "modules": diff --git a/nf_core/components/list.py b/nf_core/components/list.py index 05a8f71120..4c20e60864 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -25,7 +25,7 @@ def __init__( self.remote = remote super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) - def _configure_repo_and_paths(self, nf_dir_req=True) -> None: + def _configure_repo_and_paths(self, nf_dir_req: bool = True) -> None: """ Override the default with nf_dir_req set to False to allow info to be run from anywhere and still return remote info diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 0f3cdcdfbd..37e43a536e 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -5,7 +5,7 @@ import logging import re from pathlib import Path -from typing import List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple, Union log = logging.getLogger(__name__) @@ -41,6 +41,7 @@ def __init__( remote_component (bool): Whether the module is to be treated as a nf-core or local component """ + self.component_type = component_type self.component_name = component_name self.repo_url = repo_url self.component_dir = component_dir @@ -49,7 +50,7 @@ def __init__( self.passed: List[Tuple[str, str, Path]] = [] self.warned: List[Tuple[str, str, Path]] = [] self.failed: List[Tuple[str, str, Path]] = [] - self.inputs: List[str] = [] + self.inputs: List[List[Dict[str, Dict[str, str]]]] = [] self.outputs: List[str] = [] self.has_meta: bool = False self.git_sha: Optional[str] = None @@ -170,45 +171,95 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st def get_inputs_from_main_nf(self) -> None: """Collect all inputs from the main.nf file.""" - inputs: List[str] = [] + inputs: Any = [] # Can be 'list[list[dict[str, dict[str, str]]]]' or 'list[str]' with open(self.main_nf) as f: data = f.read() - # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo - # regex matches: - # val(foo) - # path(bar) - # val foo - # val bar - # path bar - # path foo 
- # don't match anything inside comments or after "output:" - if "input:" not in data: - log.debug(f"Could not find any inputs in {self.main_nf}") - input_data = data.split("input:")[1].split("output:")[0] - regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" - matches = re.finditer(regex, input_data, re.MULTILINE) - for _, match in enumerate(matches, start=1): - if match.group(3): - input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - inputs.append(input_val) - elif match.group(4): - input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases - inputs.append(input_val) - log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") - self.inputs = inputs + if self.component_type == "modules": + # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo + # regex matches: + # val(foo) + # path(bar) + # val foo + # val bar + # path bar + # path foo + # don't match anything inside comments or after "output:" + if "input:" not in data: + log.debug(f"Could not find any inputs in {self.main_nf}") + return + input_data = data.split("input:")[1].split("output:")[0] + for line in input_data.split("\n"): + channel_elements: Any = [] + regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + matches = re.finditer(regex, line) + for _, match in enumerate(matches, start=1): + input_val = None + if match.group(3): + input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + elif match.group(4): + input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases + if input_val: + channel_elements.append({input_val: {}}) + if len(channel_elements) > 0: + inputs.append(channel_elements) + log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs + elif self.component_type == "subworkflows": + # get input values from main.nf after "take:" + if "take:" not in data: + log.debug(f"Could not find any inputs in {self.main_nf}") + return + # get all lines between "take" and "main" or "emit" + input_data = data.split("take:")[1].split("main:")[0].split("emit:")[0] + for line in input_data.split("\n"): + try: + inputs.append(line.split()[0]) + except IndexError: + pass # Empty lines + log.debug(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs def get_outputs_from_main_nf(self): outputs = [] with open(self.main_nf) as f: data = f.read() - # get output values from main.nf after "output:". the names are always after "emit:" - if "output:" not in data: - log.debug(f"Could not find any outputs in {self.main_nf}") - return outputs - output_data = data.split("output:")[1].split("when:")[0] - regex = r"emit:\s*([^)\s,]+)" - matches = re.finditer(regex, output_data, re.MULTILINE) - for _, match in enumerate(matches, start=1): - outputs.append(match.group(1)) - log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") - self.outputs = outputs + if self.component_type == "modules": + # get output values from main.nf after "output:". 
the names are always after "emit:" + if "output:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("output:")[1].split("when:")[0] + regex_emit = r"emit:\s*([^)\s,]+)" + regex_elements = r"(val|path|env|stdout)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + for line in output_data.split("\n"): + match_emit = re.search(regex_emit, line) + matches_elements = re.finditer(regex_elements, line) + if not match_emit: + continue + output_channel = {match_emit.group(1): []} + for _, match_element in enumerate(matches_elements, start=1): + output_val = None + if match_element.group(3): + output_val = match_element.group(3) + elif match_element.group(4): + output_val = match_element.group(4) + if output_val: + output_val = output_val.strip("'").strip('"') # remove quotes + output_channel[match_emit.group(1)].append({output_val: {}}) + outputs.append(output_channel) + log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs + elif self.component_type == "subworkflows": + # get output values from main.nf after "emit:". Can be named outputs or not. + if "emit:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + return outputs + output_data = data.split("emit:")[1].split("}")[0] + for line in output_data.split("\n"): + try: + outputs.append(line.split("=")[0].split()[0]) + except IndexError: + # Empty lines + pass + log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") + self.outputs = outputs diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 41fccd8be2..59ec7a381b 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -8,7 +8,7 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer from nf_core.modules.modules_json import ModulesJson log = logging.getLogger(__name__) @@ -65,7 +65,9 @@ def patch(self, component=None): component_fullname = str(Path(self.component_type, self.modules_repo.repo_path, component)) # Verify that the component has an entry in the modules.json file - if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + if not self.modules_json.component_present( + component, self.modules_repo.remote_url, component_dir, self.component_type + ): raise UserWarning( f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. Cannot compute patch" ) @@ -112,7 +114,7 @@ def patch(self, component=None): # Write the patch to a temporary location (otherwise it is printed to the screen later) patch_temp_path = tempfile.mktemp() try: - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( patch_temp_path, component, self.modules_repo.repo_path, @@ -127,11 +129,13 @@ def patch(self, component=None): raise UserWarning(f"{self.component_type[:-1]} '{component_fullname}' is unchanged. 
No patch to compute") # Write changes to modules.json - self.modules_json.add_patch_entry(component, self.modules_repo.remote_url, component_dir, patch_relpath) + self.modules_json.add_patch_entry( + self.component_type, component, self.modules_repo.remote_url, component_dir, patch_relpath + ) log.debug(f"Wrote patch path for {self.component_type[:-1]} {component} to modules.json") # Show the changes made to the module - ModulesDiffer.print_diff( + ComponentsDiffer.print_diff( component, self.modules_repo.repo_path, component_install_dir, @@ -166,7 +170,9 @@ def remove(self, component): component_fullname = str(Path(self.component_type, component_dir, component)) # Verify that the component has an entry in the modules.json file - if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + if not self.modules_json.component_present( + component, self.modules_repo.remote_url, component_dir, self.component_type + ): raise UserWarning( f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. Cannot compute patch" ) @@ -202,7 +208,7 @@ def remove(self, component): # Try to apply the patch in reverse and move resulting files to module dir temp_component_dir = self.modules_json.try_apply_patch_reverse( - component, self.modules_repo.repo_path, patch_relpath, component_path + self.component_type, component, self.modules_repo.repo_path, patch_relpath, component_path ) try: for file in Path(temp_component_dir).glob("*"): diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index c2c5843918..37208629c0 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -68,7 +68,7 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals if not component_dir.exists(): log.error(f"Installation directory '{component_dir}' does not exist.") - if modules_json.module_present(component, self.modules_repo.remote_url, repo_path): + if modules_json.component_present(component, self.modules_repo.remote_url, repo_path, self.component_type): log.error(f"Found entry for '{component}' in 'modules.json'. Removing...") modules_json.remove_entry(self.component_type, component, self.modules_repo.remote_url, repo_path) return False diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 97aba9aa20..1e80b05e3a 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -10,13 +10,13 @@ import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.components_utils import ( get_components_to_install, prompt_component_version_sha, ) from nf_core.components.install import ComponentInstall from nf_core.components.remove import ComponentRemove -from nf_core.modules.modules_differ import ModulesDiffer from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ModulesRepo from nf_core.utils import plural_es, plural_s, plural_y @@ -58,7 +58,7 @@ def __init__( self.branch = branch def _parameter_checks(self): - """Checks the compatibilty of the supplied parameters. + """Checks the compatibility of the supplied parameters. Raises: UserWarning: if any checks fail. 
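With `ModulesDiffer` renamed to `ComponentsDiffer`, patch application now takes the component type as a new leading argument everywhere the hunks around this point call it. A minimal sketch of the renamed entry point, assuming a patched subworkflow already on disk; the component name and paths below are hypothetical, only the signature is taken from this diff:

```python
# Sketch only: ComponentsDiffer.try_apply_patch as renamed in this diff.
from pathlib import Path

from nf_core.components.components_differ import ComponentsDiffer

component_type = "subworkflows"        # new leading argument (was implicitly "modules")
component = "bam_sort_stats_samtools"  # hypothetical subworkflow
repo_path = "nf-core"
component_dir = Path(component_type, repo_path, component)
patch_path = component_dir / f"{component}.diff"  # hypothetical patch file location

# Returns {relative_path: patched_lines}; raises LookupError when a hunk
# cannot be located in the files under component_dir.
new_files = ComponentsDiffer.try_apply_patch(
    component_type, component, repo_path, patch_path, component_dir
)
for rel_path, lines in new_files.items():
    Path(component_dir, rel_path).write_text("".join(lines))
```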
@@ -233,7 +233,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr f"Writing diff file for {self.component_type[:-1]} '{component_fullname}' to '{self.save_diff_fn}'" ) try: - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( self.save_diff_fn, component, modules_repo.repo_path, @@ -275,7 +275,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) elif self.show_diff: - ModulesDiffer.print_diff( + ComponentsDiffer.print_diff( component, modules_repo.repo_path, component_dir, @@ -323,7 +323,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr if self.save_diff_fn: # Write the modules.json diff to the file - ModulesDiffer.append_modules_json_diff( + ComponentsDiffer.append_modules_json_diff( self.save_diff_fn, old_modules_json, self.modules_json.get_modules_json(), @@ -459,7 +459,9 @@ def get_single_component_info(self, component): self.modules_repo.setup_branch(current_branch) # If there is a patch file, get its filename - patch_fn = self.modules_json.get_patch_fn(component, self.modules_repo.remote_url, install_dir) + patch_fn = self.modules_json.get_patch_fn( + self.component_type, component, self.modules_repo.remote_url, install_dir + ) return (self.modules_repo, component, sha, patch_fn) @@ -705,7 +707,12 @@ def get_all_components_info(self, branch=None): # Add patch filenames to the components that have them components_info = [ - (repo, comp, sha, self.modules_json.get_patch_fn(comp, repo.remote_url, repo.repo_path)) + ( + repo, + comp, + sha, + self.modules_json.get_patch_fn(self.component_type, comp, repo.remote_url, repo.repo_path), + ) for repo, comp, sha in components_info ] @@ -820,7 +827,9 @@ def try_apply_patch( shutil.copytree(component_install_dir, temp_component_dir) try: - new_files = ModulesDiffer.try_apply_patch(component, repo_path, patch_path, temp_component_dir) + new_files = ComponentsDiffer.try_apply_patch( + self.component_type, component, repo_path, patch_path, temp_component_dir + ) except LookupError: # Patch failed. Save the patch file by moving to the install dir shutil.move(patch_path, Path(component_install_dir, patch_path.relative_to(component_dir))) @@ -838,7 +847,7 @@ def try_apply_patch( # Create the new patch file log.debug("Regenerating patch file") - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( Path(temp_component_dir, patch_path.relative_to(component_dir)), component, repo_path, @@ -858,7 +867,12 @@ def try_apply_patch( # Add the patch file to the modules.json file self.modules_json.add_patch_entry( - component, self.modules_repo.remote_url, repo_path, patch_relpath, write_file=write_file + self.component_type, + component, + self.modules_repo.remote_url, + repo_path, + patch_relpath, + write_file=write_file, ) return True diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index c462c6a47e..a0002ed424 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -2,7 +2,7 @@ # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . # See https://docs.renovatebot.com/docker/#digest-pinning for why a digest is used. 
-FROM gitpod/workspace-base@sha256:f189a4195c3861365356f9c1b438ab26fd88e1ff46ce2843afc62861fc982e0c +FROM gitpod/workspace-base@sha256:12853f7c901eb2b677a549cb112c85f9679d18feb30093bcc63aa252540ecad9 USER root @@ -23,9 +23,9 @@ RUN apt-get update --quiet && \ add-apt-repository -y ppa:apptainer/ppa && \ apt-get update --quiet && \ apt-get install --quiet --yes apptainer && \ - wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ - bash Miniconda3-latest-Linux-x86_64.sh -b -p /opt/conda && \ - rm Miniconda3-latest-Linux-x86_64.sh && \ + wget --quiet https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh && \ + bash Miniforge3-Linux-x86_64.sh -b -p /opt/conda && \ + rm Miniforge3-Linux-x86_64.sh && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* diff --git a/nf_core/module-template/meta.yml b/nf_core/module-template/meta.yml index 9d3f3c1c12..d9d1cc8ae8 100644 --- a/nf_core/module-template/meta.yml +++ b/nf_core/module-template/meta.yml @@ -20,48 +20,67 @@ tools: tool_dev_url: "{{ tool_dev_url }}" doi: "" licence: {{ tool_licence }} + identifier: {{ tool_identifier }} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as input {% endif -%} input: #{% if has_meta %} Only when we have meta - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. `[ id:'sample1', single_end:false ]` + - - meta: + type: map + description: | + Groovy Map containing sample information + e.g. `[ id:'sample1', single_end:false ]` {% endif %} {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example input {%- endif %} - - {{ 'bam:' if not_empty_template else "input:" }} - type: file - description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} - pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + - {{ 'bam:' if not_empty_template else "input:" }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + ontologies: + {% if not_empty_template -%} + - edam: "http://edamontology.org/format_25722" + - edam: "http://edamontology.org/format_2573" + - edam: "http://edamontology.org/format_3462" + {% else %} + - edam: "" + {%- endif %} {% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as output {% endif -%} output: + - {{ 'bam:' if not_empty_template else "output:" }} #{% if has_meta -%} Only when we have meta - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. `[ id:'sample1', single_end:false ]` - {% endif %} - - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
`[ id:'sample1', single_end:false ]` + {%- endif %} {% if not_empty_template -%} - ## TODO nf-core: Delete / customise this example output + ## TODO nf-core: Delete / customise this example output {%- endif %} - - {{ 'bam:' if not_empty_template else "output:" }} - type: file - description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} - pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + - {{ '"*.bam":' if not_empty_template else '"*":' }} + type: file + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} + ontologies: + {% if not_empty_template -%} + - edam: "http://edamontology.org/format_25722" + - edam: "http://edamontology.org/format_2573" + - edam: "http://edamontology.org/format_3462" + {% else -%} + - edam: "" + {%- endif %} + - versions: + - "versions.yml": + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "{{ author }}" diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 017b3965b4..49012cff40 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -14,20 +14,22 @@ import questionary import rich import rich.progress +import ruamel.yaml import nf_core.components import nf_core.components.nfcore_component import nf_core.modules.modules_utils import nf_core.utils +from nf_core.components.components_utils import get_biotools_id from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.pipelines.lint_utils import console +from nf_core.pipelines.lint_utils import console, run_prettier_on_file log = logging.getLogger(__name__) from .environment_yml import environment_yml from .main_nf import main_nf -from .meta_yml import meta_yml +from .meta_yml import meta_yml, obtain_correct_and_specified_inputs, obtain_correct_and_specified_outputs, read_meta_yml from .module_changes import module_changes from .module_deprecations import module_deprecations from .module_patch import module_patch @@ -46,6 +48,9 @@ class ModuleLint(ComponentLint): environment_yml = environment_yml main_nf = main_nf meta_yml = meta_yml + obtain_correct_and_specified_inputs = obtain_correct_and_specified_inputs + obtain_correct_and_specified_outputs = obtain_correct_and_specified_outputs + read_meta_yml = read_meta_yml module_changes = module_changes module_deprecations = module_deprecations module_patch = module_patch @@ -57,6 +62,7 @@ def __init__( self, directory: Union[str, Path], fail_warned: bool = False, + fix: bool = False, remote_url: Optional[str] = None, branch: Optional[str] = None, no_pull: bool = False, @@ -67,6 +73,7 @@ def __init__( component_type="modules", directory=directory, fail_warned=fail_warned, + fix=fix, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -237,6 +244,12 @@ def lint_module( # Otherwise run all the lint tests else: + mod.get_inputs_from_main_nf() + mod.get_outputs_from_main_nf() + # Update meta.yml file if requested + if self.fix: + self.update_meta_yml_file(mod) + if self.repo_type == "pipeline" and self.modules_json and mod.repo_url: # Set correct sha version = self.modules_json.get_module_version(mod.component_name, mod.repo_url, mod.org) @@ -256,3 +269,104 @@ def lint_module( self.failed += warned self.failed += [LintResult(mod, *m) for m in mod.failed] + + def update_meta_yml_file(self, mod): + """ + Update the meta.yml file with 
the correct inputs and outputs + """ + meta_yml = self.read_meta_yml(mod) + corrected_meta_yml = meta_yml.copy() + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.indent(mapping=2, sequence=2, offset=0) + + # Obtain inputs and outputs from main.nf and meta.yml + # Used to compare only the structure of channels and elements + # Do not compare features to allow for custom features in meta.yml (e.g. pattern) + if "input" in meta_yml: + correct_inputs, meta_inputs = self.obtain_correct_and_specified_inputs(mod, meta_yml) + if "output" in meta_yml: + correct_outputs, meta_outputs = self.obtain_correct_and_specified_outputs(mod, meta_yml) + + if "input" in meta_yml and correct_inputs != meta_inputs: + log.debug( + f"Correct inputs: '{correct_inputs}' differ from current inputs: '{meta_inputs}' in '{mod.meta_yml}'" + ) + corrected_meta_yml["input"] = mod.inputs.copy() # list of lists (channels) of dicts (elements) + for i, channel in enumerate(corrected_meta_yml["input"]): + for j, element in enumerate(channel): + element_name = list(element.keys())[0] + for k, meta_element in enumerate(meta_yml["input"]): + try: + # Handle old format of meta.yml: list of dicts (channels) + if element_name in meta_element.keys(): + # Copy current features of that input element from meta.yml + for feature in meta_element[element_name].keys(): + if feature not in element[element_name].keys(): + corrected_meta_yml["input"][i][j][element_name][feature] = meta_element[ + element_name + ][feature] + break + except AttributeError: + # Handle new format of meta.yml: list of lists (channels) of elements (dicts) + for x, meta_ch_element in enumerate(meta_element): + if element_name in meta_ch_element.keys(): + # Copy current features of that input element from meta.yml + for feature in meta_element[x][element_name].keys(): + if feature not in element[element_name].keys(): + corrected_meta_yml["input"][i][j][element_name][feature] = meta_element[x][ + element_name + ][feature] + break + + if "output" in meta_yml and correct_outputs != meta_outputs: + log.debug( + f"Correct outputs: '{correct_outputs}' differ from current outputs: '{meta_outputs}' in '{mod.meta_yml}'" + ) + corrected_meta_yml["output"] = mod.outputs.copy() # list of dicts (channels) with list of dicts (elements) + for i, channel in enumerate(corrected_meta_yml["output"]): + ch_name = list(channel.keys())[0] + for j, element in enumerate(channel[ch_name]): + element_name = list(element.keys())[0] + for k, meta_element in enumerate(meta_yml["output"]): + if element_name in meta_element.keys(): + # Copy current features of that output element from meta.yml + for feature in meta_element[element_name].keys(): + if feature not in element[element_name].keys(): + corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = meta_element[ + element_name + ][feature] + break + elif ch_name in meta_element.keys(): + # When the previous output element was using the name of the channel + # Copy current features of that output element from meta.yml + try: + # Handle old format of meta.yml + for feature in meta_element[ch_name].keys(): + if feature not in element[element_name].keys(): + corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = ( + meta_element[ch_name][feature] + ) + except AttributeError: + # Handle new format of meta.yml + for x, meta_ch_element in enumerate(meta_element[ch_name]): + for meta_ch_element_name in meta_ch_element.keys(): + for feature in meta_ch_element[meta_ch_element_name].keys(): + if feature not in
element[element_name].keys(): + corrected_meta_yml["output"][i][ch_name][j][element_name][feature] = ( + meta_ch_element[meta_ch_element_name][feature] + ) + break + + # Add bio.tools identifier + for i, tool in enumerate(corrected_meta_yml["tools"]): + tool_name = list(tool.keys())[0] + if "identifier" not in tool[tool_name]: + corrected_meta_yml["tools"][i][tool_name]["identifier"] = get_biotools_id( + mod.component_name if "/" not in mod.component_name else mod.component_name.split("/")[0] + ) + + with open(mod.meta_yml, "w") as fh: + log.info(f"Updating {mod.meta_yml}") + yaml.dump(corrected_meta_yml, fh) + run_prettier_on_file(fh.name) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index dbc1bed737..848e17130e 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -15,8 +15,8 @@ import nf_core import nf_core.modules.modules_utils +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) @@ -50,7 +50,8 @@ def main_nf( # otherwise read the lines directly from the module lines: List[str] = [] if module.is_patched: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, @@ -269,7 +270,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): url = None line = raw_line.strip(" \n'\"}:") - # Catch preceeding "container " + # Catch preceding "container " if line.startswith("container"): line = line.replace("container", "").strip(" \n'\"}:") @@ -342,6 +343,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar): continue try: container_url = "https://" + urlunparse(url) if not url.scheme == "https" else urlunparse(url) + log.debug(f"Trying to connect to URL: {container_url}") response = requests.head( container_url, stream=True, diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 4a0ef6e01e..d0268a40cc 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -1,12 +1,16 @@ import json +import logging from pathlib import Path +from typing import Union -import yaml +import ruamel.yaml from jsonschema import exceptions, validators +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.lint import ComponentLint, LintExceptionError from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.modules.modules_differ import ModulesDiffer + +log = logging.getLogger(__name__) def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None: @@ -39,12 +43,11 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None """ - module.get_inputs_from_main_nf() - module.get_outputs_from_main_nf() # Check if we have a patch file, get original file in that case - meta_yaml = None + meta_yaml = read_meta_yml(module_lint_object, module) if module.is_patched and module_lint_object.modules_repo.repo_path is not None: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, @@ -52,17 +55,15 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None reverse=True, ).get("meta.yml") if lines is 
not None: + yaml = ruamel.yaml.YAML() + meta_yaml = yaml.load("".join(lines)) if module.meta_yml is None: raise LintExceptionError("Module does not have a `meta.yml` file") if meta_yaml is None: - try: - with open(module.meta_yml) as fh: - meta_yaml = yaml.safe_load(fh) - module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) - except FileNotFoundError: - module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml)) - return + module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml)) + return + else: + module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml)) # Confirm that the meta.yml file is valid according to the JSON schema valid_meta_yml = False @@ -93,79 +94,8 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None ) ) - # Confirm that all input and output channels are specified + # Confirm that all input and output channels are correctly specified if valid_meta_yml: - if "input" in meta_yaml: - meta_input = [list(x.keys())[0] for x in meta_yaml["input"]] - for input in module.inputs: - if input in meta_input: - module.passed.append(("meta_input_main_only", f"`{input}` specified", module.meta_yml)) - else: - module.warned.append( - ( - "meta_input_main_only", - f"`{input}` is present as an input in the `main.nf`, but missing in `meta.yml`", - module.meta_yml, - ) - ) - # check if there are any inputs in meta.yml that are not in main.nf - for input in meta_input: - if input in module.inputs: - module.passed.append( - ( - "meta_input_meta_only", - f"`{input}` is present as an input in `meta.yml` and `main.nf`", - module.meta_yml, - ) - ) - else: - module.warned.append( - ( - "meta_input_meta_only", - f"`{input}` is present as an input in `meta.yml` but not in `main.nf`", - module.meta_yml, - ) - ) - - if "output" in meta_yaml and meta_yaml["output"] is not None: - meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] - for output in module.outputs: - if output in meta_output: - module.passed.append(("meta_output_main_only", f"`{output}` specified", module.meta_yml)) - else: - module.warned.append( - ( - "meta_output_main_only", - f"`{output}` is present as an output in the `main.nf`, but missing in `meta.yml`", - module.meta_yml, - ) - ) - # check if there are any outputs in meta.yml that are not in main.nf - for output in meta_output: - if output in module.outputs: - module.passed.append( - ( - "meta_output_meta_only", - f"`{output}` is present as an output in `meta.yml` and `main.nf`", - module.meta_yml, - ) - ) - elif output == "meta": - module.passed.append( - ( - "meta_output_meta_only", - f"`{output}` is skipped for `meta.yml` outputs", - module.meta_yml, - ) - ) - else: - module.warned.append( - ( - "meta_output_meta_only", - f"`{output}` is present as an output in `meta.yml` but not in `main.nf`", - module.meta_yml, - ) - ) # confirm that the name matches the process name in main.nf if meta_yaml["name"].upper() == module.process_name: module.passed.append( @@ -183,3 +113,180 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None module.meta_yml, ) ) + # Check that inputs are specified in meta.yml + if len(module.inputs) > 0 and "input" not in meta_yaml: + module.failed.append( + ( + "meta_input", + "Inputs not specified in module `meta.yml`", + module.meta_yml, + ) + ) + elif len(module.inputs) > 0: + module.passed.append( + ( + "meta_input", + "Inputs specified in module
`meta.yml`", + module.meta_yml, + ) + ) + else: + log.debug(f"No inputs specified in module `main.nf`: {module.component_name}") + # Check that all inputs are correctly specified + if "input" in meta_yaml: + correct_inputs, meta_inputs = obtain_correct_and_specified_inputs(module_lint_object, module, meta_yaml) + + if correct_inputs == meta_inputs: + module.passed.append( + ( + "correct_meta_inputs", + "Correct inputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + else: + module.failed.append( + ( + "correct_meta_inputs", + f"Module `meta.yml` does not match `main.nf`. Inputs should contain: {correct_inputs}\nRun `nf-core modules lint --fix` to update the `meta.yml` file.", + module.meta_yml, + ) + ) + + # Check that outputs are specified in meta.yml + if len(module.outputs) > 0 and "output" not in meta_yaml: + module.failed.append( + ( + "meta_output", + "Outputs not specified in module `meta.yml`", + module.meta_yml, + ) + ) + elif len(module.outputs) > 0: + module.passed.append( + ( + "meta_output", + "Outputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + # Check that all outputs are correctly specified + if "output" in meta_yaml: + correct_outputs, meta_outputs = obtain_correct_and_specified_outputs(module_lint_object, module, meta_yaml) + + if correct_outputs == meta_outputs: + module.passed.append( + ( + "correct_meta_outputs", + "Correct outputs specified in module `meta.yml`", + module.meta_yml, + ) + ) + else: + module.failed.append( + ( + "correct_meta_outputs", + f"Module `meta.yml` does not match `main.nf`. Outputs should contain: {correct_outputs}\nRun `nf-core modules lint --fix` to update the `meta.yml` file.", + module.meta_yml, + ) + ) + + +def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> Union[dict, None]: + """ + Read a `meta.yml` file and return it as a dictionary + + Args: + module_lint_object (ComponentLint): The lint object for the module + module (NFCoreComponent): The module to read + + Returns: + dict: The `meta.yml` file as a dictionary + """ + meta_yaml = None + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + # Check if we have a patch file, get original file in that case + if module.is_patched: + lines = ComponentsDiffer.try_apply_patch( + module.component_type, + module.component_name, + module_lint_object.modules_repo.repo_path, + module.patch_path, + Path(module.component_dir).relative_to(module.base_dir), + reverse=True, + ).get("meta.yml") + if lines is not None: + meta_yaml = yaml.load("".join(lines)) + if meta_yaml is None: + if module.meta_yml is None: + return None + with open(module.meta_yml) as fh: + meta_yaml = yaml.load(fh) + return meta_yaml + + +def obtain_correct_and_specified_inputs(_, module, meta_yaml): + """ + Obtain the list of correct inputs and the elements of each input channel. + + Args: + module (object): The module object. + meta_yaml (dict): The meta.yml dictionary. + + Returns: + tuple: A tuple containing two lists. The first list contains the correct inputs, + and the second list contains the inputs specified in meta.yml. 
+ """ + correct_inputs = [] + for input_channel in module.inputs: + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + correct_inputs.append(channel_elements) + + meta_inputs = [] + for input_channel in meta_yaml["input"]: + if isinstance(input_channel, list): # Correct format + channel_elements = [] + for element in input_channel: + channel_elements.append(list(element.keys())[0]) + meta_inputs.append(channel_elements) + elif isinstance(input_channel, dict): # Old format + meta_inputs.append(list(input_channel.keys())[0]) + + return correct_inputs, meta_inputs + + +def obtain_correct_and_specified_outputs(_, module, meta_yaml): + """ + Obtain the dictionary of correct outputs and elements of each output channel. + + Args: + module (object): The module object. + meta_yaml (dict): The meta.yml dictionary. + + Returns: + correct_outputs (dict): A dictionary containing the correct outputs and their elements. + meta_outputs (dict): A dictionary containing the outputs specified in meta.yml. + """ + correct_outputs = {} + for output_channel in module.outputs: + channel_name = list(output_channel.keys())[0] + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + correct_outputs[channel_name] = channel_elements + + meta_outputs = {} + for output_channel in meta_yaml["output"]: + channel_name = list(output_channel.keys())[0] + if isinstance(output_channel[channel_name], list): # Correct format + channel_elements = [] + for element in output_channel[channel_name]: + channel_elements.append(list(element.keys())[0]) + meta_outputs[channel_name] = channel_elements + elif isinstance(output_channel[channel_name], dict): # Old format + meta_outputs[channel_name] = [] + + return correct_outputs, meta_outputs diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index eb76f4b88b..121de00c0a 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -7,7 +7,7 @@ from pathlib import Path import nf_core.modules.modules_repo -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer def module_changes(module_lint_object, module): @@ -30,7 +30,8 @@ def module_changes(module_lint_object, module): tempdir = tempdir_parent / "tmp_module_dir" shutil.copytree(module.component_dir, tempdir) try: - new_lines = ModulesDiffer.try_apply_patch( + new_lines = ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module.org, module.patch_path, diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py index 29bf78a66b..6347c5c553 100644 --- a/nf_core/modules/lint/module_patch.py +++ b/nf_core/modules/lint/module_patch.py @@ -1,7 +1,7 @@ from pathlib import Path +from ...components.components_differ import ComponentsDiffer from ...components.nfcore_component import NFCoreComponent -from ..modules_differ import ModulesDiffer def module_patch(module_lint_obj, module: NFCoreComponent): @@ -66,11 +66,11 @@ def check_patch_valid(module, patch_path): continue topath = Path(line.split(" ")[1].strip("\n")) if frompath == Path("/dev/null"): - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.CREATED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.CREATED)) elif topath == Path("/dev/null"): - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.REMOVED)) + paths_in_patch.append((frompath, 
ComponentsDiffer.DiffEnum.REMOVED)) elif frompath == topath: - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.CHANGED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.CHANGED)) else: module.failed.append( ( @@ -105,7 +105,7 @@ def check_patch_valid(module, patch_path): # Warn about any created or removed files passed = True for path, diff_status in paths_in_patch: - if diff_status == ModulesDiffer.DiffEnum.CHANGED: + if diff_status == ComponentsDiffer.DiffEnum.CHANGED: if not Path(module.base_dir, path).exists(): module.failed.append( ( @@ -116,7 +116,7 @@ def check_patch_valid(module, patch_path): ) passed = False continue - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: if not Path(module.base_dir, path).exists(): module.failed.append( ( @@ -130,7 +130,7 @@ def check_patch_valid(module, patch_path): module.warned.append( ("patch", f"Patch file performs file creation of {path}. This is discouraged."), patch_path ) - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: if Path(module.base_dir, path).exists(): module.failed.append( ( @@ -161,7 +161,8 @@ def patch_reversible(module_lint_object, module, patch_path): (bool): False if any test failed, True otherwise """ try: - ModulesDiffer.try_apply_patch( + ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, patch_path, diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 08e117b1ad..a9ba3b442c 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -19,7 +19,7 @@ from nf_core.modules.modules_repo import ModulesRepo from nf_core.pipelines.lint_utils import dump_json_with_prettier -from .modules_differ import ModulesDiffer +from ..components.components_differ import ComponentsDiffer log = logging.getLogger(__name__) @@ -308,7 +308,9 @@ def determine_branches_and_shas( # If the module/subworkflow is patched patch_file = component_path / f"{component}.diff" if patch_file.is_file(): - temp_module_dir = self.try_apply_patch_reverse(component, install_dir, patch_file, component_path) + temp_module_dir = self.try_apply_patch_reverse( + component_type, component, install_dir, patch_file, component_path + ) correct_commit_sha = self.find_correct_commit_sha( component_type, component, temp_module_dir, modules_repo ) @@ -432,7 +434,7 @@ def move_component_to_local(self, component_type: str, component: str, repo_name to_name += f"-{datetime.datetime.now().strftime('%y%m%d%H%M%S')}" shutil.move(str(current_path), local_dir / to_name) - def unsynced_components(self) -> Tuple[List[str], List[str], dict]: + def unsynced_components(self) -> Tuple[List[str], List[str], Dict]: """ Compute the difference between the modules/subworkflows in the directory and the modules/subworkflows in the 'modules.json' file. 
This is done by looking at all @@ -805,7 +807,7 @@ def remove_entry(self, component_type, name, repo_url, install_dir, removed_by=N return False - def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, write_file=True): + def add_patch_entry(self, component_type, component_name, repo_url, install_dir, patch_filename, write_file=True): """ Adds (or replaces) the patch entry for a module """ @@ -815,9 +817,11 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr if repo_url not in self.modules_json["repos"]: raise LookupError(f"Repo '{repo_url}' not present in 'modules.json'") - if module_name not in self.modules_json["repos"][repo_url]["modules"][install_dir]: - raise LookupError(f"Module '{install_dir}/{module_name}' not present in 'modules.json'") - self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["patch"] = str(patch_filename) + if component_name not in self.modules_json["repos"][repo_url][component_type][install_dir]: + raise LookupError( + f"{component_type[:-1].title()} '{install_dir}/{component_name}' not present in 'modules.json'" + ) + self.modules_json["repos"][repo_url][component_type][install_dir][component_name]["patch"] = str(patch_filename) if write_file: self.dump() @@ -833,17 +837,17 @@ def remove_patch_entry(self, module_name, repo_url, install_dir, write_file=True if write_file: self.dump() - def get_patch_fn(self, module_name, repo_url, install_dir): + def get_patch_fn(self, component_type, component_name, repo_url, install_dir): """ - Get the patch filename of a module + Get the patch filename of a component Args: - module_name (str): The name of the module - repo_url (str): The URL of the repository containing the module - install_dir (str): The name of the directory where modules are installed + component_name (str): The name of the component + repo_url (str): The URL of the repository containing the component + install_dir (str): The name of the directory where components are installed Returns: - (str): The patch filename for the module, None if not present + (str): The patch filename for the component, None if not present """ if self.modules_json is None: self.load() @@ -851,48 +855,53 @@ def get_patch_fn(self, module_name, repo_url, install_dir): path = ( self.modules_json["repos"] .get(repo_url, {}) - .get("modules") + .get(component_type) .get(install_dir) - .get(module_name, {}) + .get(component_name, {}) .get("patch") ) return Path(path) if path is not None else None - def try_apply_patch_reverse(self, module, repo_name, patch_relpath, module_dir): + def try_apply_patch_reverse(self, component_type, component, repo_name, patch_relpath, component_dir): """ - Try reverse applying a patch file to the modified module files + Try reverse applying a patch file to the modified module or subworkflow files Args: - module (str): The name of the module - repo_name (str): The name of the repository where the module resides + component_type (str): The type of component [modules, subworkflows] + component (str): The name of the module or subworkflow + repo_name (str): The name of the repository where the component resides patch_relpath (Path | str): The path to patch file in the pipeline - module_dir (Path | str): The module directory in the pipeline + component_dir (Path | str): The component directory in the pipeline Returns: - (Path | str): The path of the folder where the module patched files are + (Path | str): The path of the folder where the component patched files are Raises: LookupError: 
If patch was not applied """ - module_fullname = str(Path(repo_name, module)) + component_fullname = str(Path(repo_name, component)) patch_path = Path(self.directory / patch_relpath) try: - new_files = ModulesDiffer.try_apply_patch(module, repo_name, patch_path, module_dir, reverse=True) + new_files = ComponentsDiffer.try_apply_patch( + component_type, component, repo_name, patch_path, component_dir, reverse=True + ) except LookupError as e: - raise LookupError(f"Failed to apply patch in reverse for module '{module_fullname}' due to: {e}") + raise LookupError( + f"Failed to apply patch in reverse for {component_type[:-1]} '{component_fullname}' due to: {e}" + ) # Write the patched files to a temporary directory log.debug("Writing patched files to tmpdir") temp_dir = Path(tempfile.mkdtemp()) - temp_module_dir = temp_dir / module - temp_module_dir.mkdir(parents=True, exist_ok=True) + temp_component_dir = temp_dir / component + temp_component_dir.mkdir(parents=True, exist_ok=True) for file, new_content in new_files.items(): - fn = temp_module_dir / file + fn = temp_component_dir / file with open(fn, "w") as fh: fh.writelines(new_content) - return temp_module_dir + return temp_component_dir def repo_present(self, repo_name): """ @@ -908,20 +917,21 @@ def repo_present(self, repo_name): return repo_name in self.modules_json.get("repos", {}) - def module_present(self, module_name, repo_url, install_dir): + def component_present(self, module_name, repo_url, install_dir, component_type): """ Checks if a module is present in the modules.json file Args: module_name (str): Name of the module repo_url (str): URL of the repository install_dir (str): Name of the directory where modules are installed + component_type (str): Type of component [modules, subworkflows] Returns: (bool): Whether the module is present in the 'modules.json' file """ if self.modules_json is None: self.load() assert self.modules_json is not None # mypy - return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get("modules", {}).get( + return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get(component_type, {}).get( install_dir, {} ) @@ -1119,8 +1129,10 @@ def dump(self, run_prettier: bool = False) -> None: """ Sort the modules.json, and write it to file """ + # Sort the modules.json + if self.modules_json is None: + self.load() if self.modules_json is not None: - # Sort the modules.json self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"]) if run_prettier: dump_json_with_prettier(self.modules_json_path, self.modules_json) diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig index 5145366e50..c78ec8e960 100644 --- a/nf_core/pipeline-template/.editorconfig +++ b/nf_core/pipeline-template/.editorconfig @@ -11,7 +11,7 @@ indent_style = space [*.{md,yml,yaml,html,css,scss,js}] indent_size = 2 -{%- if modules %} +{% if modules -%} # These files are edited and tested upstream in nf-core/modules [/modules/nf-core/**] charset = unset @@ -27,7 +27,7 @@ trim_trailing_whitespace = unset indent_style = unset {%- endif %} -{%- if email %} +{% if email -%} [/assets/email*] indent_size = unset {%- endif %} diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index f331d38673..37970c09e8 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -1,4 +1,4 @@ -# {{ name }}: Contributing Guidelines +# `{{ name 
}}`: Contributing Guidelines Hi there! Many thanks for taking an interest in improving {{ name }}. @@ -30,14 +30,14 @@ If you're not used to this workflow with git, you can start with some [docs from ## Tests -{%- if test_config %} +{% if test_config -%} You have the option to test your changes locally by running the pipeline. For receiving warnings about process selectors and other `debug` information, it is recommended to use the debug profile. Execute all the tests with the following command: ```bash nf-test test --profile debug,test,docker --verbose ``` -{% endif %} +{% endif -%} When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests. Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then. @@ -66,7 +66,7 @@ These tests are run both with the latest available version of `Nextflow` and als - On your own fork, make a new branch `patch` based on `upstream/master`. - Fix the bug, and bump version (X.Y.Z+1). -- A PR should be made on `master` from patch to directly this particular bug. +- Open a pull-request from `patch` to `master` with the changes. {% if is_nfcore -%} @@ -78,13 +78,13 @@ For further information/help, please consult the [{{ name }} documentation](http ## Pipeline contribution conventions -To make the {{ name }} code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. +To make the `{{ name }}` code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. ### Adding a new step If you wish to contribute a new step, please use the following coding standards: -1. Define the corresponding input channel into your new process from the expected previous process channel +1. Define the corresponding input channel into your new process from the expected previous process channel. 2. Write the process block (see below). 3. Define the output channel if needed (see below). 4. Add any new parameters to `nextflow.config` with a default (see below). @@ -95,17 +95,17 @@ If you wish to contribute a new step, please use the following coding standards: {%- if multiqc %} 9. Update MultiQC config `assets/multiqc_config.yml` so relevant suffixes, file name clean up and module plots are in the appropriate order. If applicable, add a [MultiQC](https://https://multiqc.info/) module. 10. Add a description of the output files and if relevant any appropriate images from the MultiQC report to `docs/output.md`. - {% endif %} + {%- endif %} ### Default values -Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope. +Parameters should be initialised / defined with default values within the `params` scope in `nextflow.config`. Once there, use `nf-core pipelines schema build` to add to `nextflow_schema.json`. ### Default processes resource requirements -Sensible defaults for process resource requirements (CPUs / memory / time) for a process should be defined in `conf/base.config`. These should generally be specified generic with `withLabel:` selectors so they can be shared across multiple processes/steps of the pipeline. 
A nf-core standard set of labels that should be followed where possible can be seen in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config), which has the default process as a single core-process, and then different levels of multi-core configurations for increasingly large memory requirements defined with standardised labels. +Sensible defaults for process resource requirements (CPUs / memory / time) for a process should be defined in `conf/base.config`. These should generally be specified generically with `withLabel:` selectors so they can be shared across multiple processes/steps of the pipeline. An nf-core standard set of labels that should be followed where possible can be seen in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/main/nf_core/pipeline-template/conf/base.config), which has the default process as a single core-process, and then different levels of multi-core configurations for increasingly large memory requirements defined with standardised labels. The process resources can be passed on to the tool dynamically within the process with the `${task.cpus}` and `${task.memory}` variables in the `script:` block. @@ -139,4 +139,4 @@ To get started: Devcontainer specs: - [DevContainer config](.devcontainer/devcontainer.json) - {% endif %} + {%- endif %} diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index dc0450be43..1ca2ac2c74 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -14,16 +14,19 @@ on: jobs: run-platform: name: Run AWS full tests - if: github.repository == '{{ name }}' && github.event.review.state == 'approved' + # run only if the PR is approved by at least 2 reviewers and against the master branch or manually triggered + if: github.repository == '{{ name }}' && github.event.review.state == 'approved' && github.event.pull_request.base.ref == 'master' || github.event_name == 'workflow_dispatch' runs-on: ubuntu-latest steps: - uses: octokit/request-action@v2.x + if: github.event_name != 'workflow_dispatch' id: check_approvals with: - route: GET /repos/{%- raw -%}${{ github.repository }}/pulls/${{ github.event.review.number }}/reviews + route: GET /repos/{%- raw -%}${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews?per_page=100 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - id: test_variables + if: github.event_name != 'workflow_dispatch' run: | JSON_RESPONSE='${{ steps.check_approvals.outputs.data }}'{% endraw %} CURRENT_APPROVALS_COUNT=$(echo $JSON_RESPONSE | jq -c '[.[] | select(.state | contains("APPROVED")) ] | length') diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 63fa99cec8..9db393d9f0 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -1,5 +1,5 @@ name: nf-core CI -# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors +# {% raw %}This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors on: push: branches: @@ -7,40 +7,79 @@ on: pull_request: release: types: [published] + workflow_dispatch: env: NXF_ANSI_LOG: false + NXF_SINGULARITY_CACHEDIR: ${{ github.workspace }}/.singularity + NXF_SINGULARITY_LIBRARYDIR:
${{ github.workspace }}/.singularity concurrency: - group: "{% raw %}${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}{% endraw %}" + group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" cancel-in-progress: true jobs: test: - name: Run pipeline with test data + name: "Run pipeline with test data (${{ matrix.NXF_VER }} | ${{ matrix.test_name }} | ${{ matrix.profile }})" # Only run on push if this is the nf-core dev branch (merged PRs) - if: "{% raw %}${{{% endraw %} github.event_name != 'push' || (github.event_name == 'push' && github.repository == '{{ name }}') {% raw %}}}{% endraw %}" + if: "${{{% endraw %} github.event_name != 'push' || (github.event_name == 'push' && github.repository == '{{ name }}') {% raw %}}}" runs-on: ubuntu-latest strategy: matrix: NXF_VER: - - "23.10.0" + - "24.04.2" - "latest-everything" + profile: + - "conda" + - "docker" + - "singularity" + test_name: + - "test" + isMaster: + - ${{ github.base_ref == 'master' }} + # Exclude conda and singularity on dev + exclude: + - isMaster: false + profile: "conda" + - isMaster: false + profile: "singularity" steps: - name: Check out pipeline code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - name: Install Nextflow + - name: Set up Nextflow uses: nf-core/setup-nextflow@v2 with: - version: "{% raw %}${{ matrix.NXF_VER }}{% endraw %}" + version: "${{ matrix.NXF_VER }}" - - name: Disk space cleanup + - name: Set up Apptainer + if: matrix.profile == 'singularity' + uses: eWaterCycle/setup-apptainer@main + + - name: Set up Singularity + if: matrix.profile == 'singularity' + run: | + mkdir -p $NXF_SINGULARITY_CACHEDIR + mkdir -p $NXF_SINGULARITY_LIBRARYDIR + + - name: Set up Miniconda + if: matrix.profile == 'conda' + uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3 + with: + miniconda-version: "latest" + auto-update-conda: true + conda-solver: libmamba + channels: conda-forge,bioconda + + - name: Set up Conda + if: matrix.profile == 'conda' + run: | + echo $(realpath $CONDA)/condabin >> $GITHUB_PATH + echo $(realpath python) >> $GITHUB_PATH + + - name: Clean up Disk space uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1 - - name: Run pipeline with test data - # TODO nf-core: You can customise CI pipeline run tests as required - # For example: adding multiple test runs with different parameters - # Remember that you can parallelise this by using strategy.matrix + - name: "Run pipeline with test data ${{ matrix.NXF_VER }} | ${{ matrix.test_name }} | ${{ matrix.profile }}" run: | - nextflow run ${GITHUB_WORKSPACE} -profile test,docker --outdir ./results + nextflow run ${GITHUB_WORKSPACE} -profile ${{ matrix.test_name }},${{ matrix.profile }} --outdir ./results{% endraw %} diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index e7a28e5ac4..1bc42469c4 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -35,13 +35,15 @@ jobs: - name: Disk space cleanup uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1 - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" 
architecture: "x64" - - uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7 + + - name: Setup Apptainer + uses: eWaterCycle/setup-apptainer@4bb22c52d4f63406c49e94c804632975787312b3 # v2.0.0 with: - singularity-version: 3.8.3 + apptainer-version: 1.3.4 - name: Install dependencies run: | @@ -54,33 +56,64 @@ jobs: echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV} echo "{% raw %}REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV} + - name: Make a cache directory for the container images + run: | + mkdir -p ./singularity_container_images + - name: Download the pipeline env: - NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images run: | nf-core pipelines download ${{ env.REPO_LOWERCASE }} \ --revision ${{ env.REPO_BRANCH }} \ --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ --compress "none" \ --container-system 'singularity' \ - --container-library "quay.io" -l "docker.io" -l "ghcr.io" \ + --container-library "quay.io" -l "docker.io" -l "community.wave.seqera.io/library/" \ --container-cache-utilisation 'amend' \ - --download-configuration + --download-configuration 'yes' - name: Inspect download run: tree ./${{ env.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %} + - name: Count the downloaded number of container images + id: count_initial + run: | + image_count=$(ls -1 ./singularity_container_images | wc -l | xargs) + echo "Initial container image count: $image_count" + echo "IMAGE_COUNT_INITIAL=$image_count" >> ${GITHUB_ENV} + - name: Run the downloaded pipeline (stub) id: stub_run_pipeline continue-on-error: true env: - NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images NXF_SINGULARITY_HOME_MOUNT: true run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results - name: Run the downloaded pipeline (stub run not supported) id: run_pipeline if: ${{ job.steps.stub_run_pipeline.status == failure() }} env: - NXF_SINGULARITY_CACHEDIR: ./ + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images NXF_SINGULARITY_HOME_MOUNT: true - run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results{% endraw %}{% endif %} + run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results + + - name: Count the downloaded number of container images + id: count_afterwards + run: | + image_count=$(ls -1 ./singularity_container_images | wc -l | xargs) + echo "Post-pipeline run container image count: $image_count" + echo "IMAGE_COUNT_AFTER=$image_count" >> ${GITHUB_ENV} + + - name: Compare container image counts + run: | + if [ "${{ env.IMAGE_COUNT_INITIAL }}" -ne "${{ env.IMAGE_COUNT_AFTER }}" ]; then + initial_count=${{ env.IMAGE_COUNT_INITIAL }} + final_count=${{ env.IMAGE_COUNT_AFTER }} + difference=$((final_count - initial_count)) + echo "$difference additional container images were downloaded at runtime. The pipeline has no support for offline runs!" + tree ./singularity_container_images + exit 1 + else + echo "The pipeline can be downloaded successfully!"
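+            # Matching image counts mean no additional containers were pulled at runtime, so the downloaded pipeline should run offline.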
+ fi{% endraw %}{% endif %} diff --git a/nf_core/pipeline-template/.github/workflows/fix-linting.yml b/nf_core/pipeline-template/.github/workflows/fix-linting.yml index 18e6f9e158..4fa3d54d75 100644 --- a/nf_core/pipeline-template/.github/workflows/fix-linting.yml +++ b/nf_core/pipeline-template/.github/workflows/fix-linting.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: token: ${{ secrets.nf_core_bot_auth_token }} @@ -32,7 +32,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} # Install and run pre-commit - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index dbba830ec1..cfdbcc12a9 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -14,10 +14,10 @@ jobs: pre-commit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up Python 3.12 - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" @@ -31,21 +31,21 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Install Nextflow uses: nf-core/setup-nextflow@v2 - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.12" architecture: "x64" - name: read .nf-core.yml - uses: pietrobolcato/action-read-yaml@1.0.0 + uses: pietrobolcato/action-read-yaml@1.1.0 id: read_yml with: - config: ${{ github.workspace }}/.nf-core.yaml + config: ${{ github.workspace }}/.nf-core.yml - name: Install dependencies run: | @@ -74,7 +74,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4 + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4 with: name: linting-logs path: | diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index 908dcea159..63b20bb311 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 + uses: dawidd6/action-download-artifact@80620a5d27ce0ae443b965134db88467fc607b43 # v7 with: workflow: linting.yml workflow_conclusion: completed diff --git a/nf_core/pipeline-template/.github/workflows/release-announcements.yml b/nf_core/pipeline-template/.github/workflows/release-announcements.yml index 8fee061fdd..e1b654d34b 100644 --- 
a/nf_core/pipeline-template/.github/workflows/release-announcements.yml +++ b/nf_core/pipeline-template/.github/workflows/release-announcements.yml @@ -12,7 +12,7 @@ jobs: - name: get topics and convert to hashtags id: get_topics run: | - echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" >> $GITHUB_OUTPUT + echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" | sed 's/-//g' >> $GITHUB_OUTPUT - uses: rzr/fediverse-action@master with: @@ -31,7 +31,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5 with: python-version: "3.10" - name: Install dependencies diff --git a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml index 58db2eb63a..27737afb47 100644 --- a/nf_core/pipeline-template/.github/workflows/template_version_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/template_version_comment.yml @@ -9,10 +9,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + with: + ref: ${{ github.event.pull_request.head.sha }} - name: Read template version from .nf-core.yml - uses: pietrobolcato/action-read-yaml@1.0.0 + uses: nichmor/minimal-read-yaml@v0.0.2 id: read_yml with: config: ${{ github.workspace }}/.nf-core.yml @@ -24,20 +26,21 @@ jobs: - name: Check nf-core outdated id: nf_core_outdated - run: pip list --outdated | grep nf-core + run: echo "OUTPUT=$(pip list --outdated | grep nf-core)" >> ${GITHUB_ENV} - name: Post nf-core template version comment uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 if: | - ${{ steps.nf_core_outdated.outputs.stdout }} =~ 'nf-core' + contains(env.OUTPUT, 'nf-core') with: repo-token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} allow-repeats: false message: | - ## :warning: Newer version of the nf-core template is available. - - Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. - Please update your pipeline to the latest version. - - For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync). + > [!WARNING] + > Newer version of the nf-core template is available. + > + > Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. + > Please update your pipeline to the latest version. + > + > For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync). 
#{%- endraw %} diff --git a/nf_core/pipeline-template/.gitignore b/nf_core/pipeline-template/.gitignore index 5124c9ac77..a42ce0162e 100644 --- a/nf_core/pipeline-template/.gitignore +++ b/nf_core/pipeline-template/.gitignore @@ -6,3 +6,4 @@ results/ testing/ testing* *.pyc +null/ diff --git a/nf_core/pipeline-template/.pre-commit-config.yaml b/nf_core/pipeline-template/.pre-commit-config.yaml index 4dc0f1dcd7..9e9f0e1c4e 100644 --- a/nf_core/pipeline-template/.pre-commit-config.yaml +++ b/nf_core/pipeline-template/.pre-commit-config.yaml @@ -7,7 +7,7 @@ repos: - prettier@3.2.5 - repo: https://github.com/editorconfig-checker/editorconfig-checker.python - rev: "2.7.3" + rev: "3.0.3" hooks: - id: editorconfig-checker alias: ec diff --git a/nf_core/pipeline-template/.prettierignore b/nf_core/pipeline-template/.prettierignore index c8e8ad9e11..7ecc9b61cb 100644 --- a/nf_core/pipeline-template/.prettierignore +++ b/nf_core/pipeline-template/.prettierignore @@ -1,4 +1,4 @@ -{%- if email %} +{% if email -%} email_template.html {%- endif %} {%- if adaptivecard %} diff --git a/nf_core/pipeline-template/.vscode/settings.json b/nf_core/pipeline-template/.vscode/settings.json new file mode 100644 index 0000000000..a33b527cc7 --- /dev/null +++ b/nf_core/pipeline-template/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "markdown.styles": ["public/vscode_markdown.css"] +} diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index 2373f1de7f..16da9a4207 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -15,10 +15,14 @@ {% if fastqc %}- [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) > Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. -> {% endif %} > {% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) + +{%- endif %} + +{% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. -> {%- endif %} + +{%- endif %} ## Software packaging/containerisation tools @@ -41,4 +45,5 @@ - [Singularity](https://pubmed.ncbi.nlm.nih.gov/28494014/) > Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS One. 2017 May 11;12(5):e0177459. doi: 10.1371/journal.pone.0177459. eCollection 2017. PubMed PMID: 28494014; PubMed Central PMCID: PMC5426675. 
- > {%- endif %} + + {%- endif %} diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index beb45ed511..a8f2e60546 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -7,7 +7,7 @@ -{%- else %} +{% else %} # {{ name }} @@ -20,7 +20,7 @@ [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) [![nf-test](https://img.shields.io/badge/unit_tests-nf--test-337ab7.svg)](https://www.nf-test.com) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.10.0-23aa62.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -128,7 +128,7 @@ An extensive list of references for the tools used by the pipeline can be found You can cite the `nf-core` publication as follows: {% else -%} -This pipeline uses code and infrastructure developed and maintained by the [nf-core](https://nf-co.re) community, reused here under the [MIT license](https://github.com/nf-core/tools/blob/master/LICENSE). +This pipeline uses code and infrastructure developed and maintained by the [nf-core](https://nf-co.re) community, reused here under the [MIT license](https://github.com/nf-core/tools/blob/main/LICENSE). {% endif -%} diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index cd4e539b31..e6fd878986 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -3,11 +3,11 @@ report_comment: > This report has been generated by the {{ name }} analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} - {%- else %} + {%- else -%} This report has been generated by the {{ name }} analysis pipeline.{% if is_nfcore %} For information about how to interpret these results, please see the documentation.{% endif %} - {% endif %} + {%- endif %} report_section_order: "{{ name_noslash }}-methods-description": order: -1000 diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config index 9c62bf0634..16a4fe6cdf 100644 --- a/nf_core/pipeline-template/conf/base.config +++ b/nf_core/pipeline-template/conf/base.config @@ -11,46 +11,46 @@ process { // TODO nf-core: Check the defaults for all processes - cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 * task.attempt } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } maxRetries = 1 maxErrors = '-1' // Process-specific resource requirements - // NOTE - Please try and re-use the labels below as much as possible. + // NOTE - Please try and reuse the labels below as much as possible. // These labels are used and recognised by default in DSL2 files hosted on nf-core/modules. 
// If possible, it would be nice to keep the same label naming convention when // adding in your local modules too. // TODO nf-core: Customise requirements for specific processes. // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors withLabel:process_single { - cpus = { check_max( 1 , 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } } withLabel:process_low { - cpus = { check_max( 2 * task.attempt, 'cpus' ) } - memory = { check_max( 12.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 2 * task.attempt } + memory = { 12.GB * task.attempt } + time = { 4.h * task.attempt } } withLabel:process_medium { - cpus = { check_max( 6 * task.attempt, 'cpus' ) } - memory = { check_max( 36.GB * task.attempt, 'memory' ) } - time = { check_max( 8.h * task.attempt, 'time' ) } + cpus = { 6 * task.attempt } + memory = { 36.GB * task.attempt } + time = { 8.h * task.attempt } } withLabel:process_high { - cpus = { check_max( 12 * task.attempt, 'cpus' ) } - memory = { check_max( 72.GB * task.attempt, 'memory' ) } - time = { check_max( 16.h * task.attempt, 'time' ) } + cpus = { 12 * task.attempt } + memory = { 72.GB * task.attempt } + time = { 16.h * task.attempt } } withLabel:process_long { - time = { check_max( 20.h * task.attempt, 'time' ) } + time = { 20.h * task.attempt } } withLabel:process_high_memory { - memory = { check_max( 200.GB * task.attempt, 'memory' ) } + memory = { 200.GB * task.attempt } } withLabel:error_ignore { errorStrategy = 'ignore' diff --git a/nf_core/pipeline-template/conf/igenomes_ignored.config b/nf_core/pipeline-template/conf/igenomes_ignored.config new file mode 100644 index 0000000000..b4034d8243 --- /dev/null +++ b/nf_core/pipeline-template/conf/igenomes_ignored.config @@ -0,0 +1,9 @@ +/* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Nextflow config file for iGenomes paths +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Empty genomes dictionary to use when igenomes is ignored. +---------------------------------------------------------------------------------------- +*/ + +params.genomes = [:] diff --git a/nf_core/pipeline-template/conf/modules.config b/nf_core/pipeline-template/conf/modules.config index 35e861d9b1..1614e2b1a9 100644 --- a/nf_core/pipeline-template/conf/modules.config +++ b/nf_core/pipeline-template/conf/modules.config @@ -18,13 +18,15 @@ process { saveAs: { filename -> filename.equals('versions.yml') ? null : filename } ] - {% if fastqc -%} + {%- if fastqc %} + withName: FASTQC { ext.args = '--quiet' } {%- endif %} {%- if multiqc %} + withName: 'MULTIQC' { ext.args = { params.multiqc_title ? 
"--title \"$params.multiqc_title\"" : '' } publishDir = [ diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index 827e21b7b7..bea6f670d0 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -10,15 +10,18 @@ ---------------------------------------------------------------------------------------- */ +process { + resourceLimits = [ + cpus: 4, + memory: '15.GB', + time: '1.h' + ] +} + params { config_profile_name = 'Test profile' config_profile_description = 'Minimal test dataset to check pipeline function' - // Limit resources so that this can run on GitHub Actions - max_cpus = 2 - max_memory = '6.GB' - max_time = '6.h' - // Input data // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets // TODO nf-core: Give any required params for the test so that command line flags are not needed diff --git a/nf_core/pipeline-template/docs/output.md b/nf_core/pipeline-template/docs/output.md index 5e42d50cc5..83d5d23fe3 100644 --- a/nf_core/pipeline-template/docs/output.md +++ b/nf_core/pipeline-template/docs/output.md @@ -12,12 +12,16 @@ The directories listed below will be created in the results directory after the The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps: -{% if fastqc %}- [FastQC](#fastqc) - Raw read QC{% endif %} -{% if multiqc %}- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline{% endif %} +{% if fastqc -%} +- [FastQC](#fastqc) - Raw read QC + {%- endif %} + {%- if multiqc %} +- [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline + {%- endif %} - [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution -{%- if fastqc %} +{% if fastqc -%} ### FastQC @@ -32,7 +36,8 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d [FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences. For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). {%- endif %} -{%- if multiqc %} + +{% if multiqc -%} ### MultiQC @@ -49,7 +54,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes d [MultiQC](http://multiqc.info) is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQC. The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see . -{% endif %} +{%- endif %} ### Pipeline information @@ -58,7 +63,8 @@ Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQ - `pipeline_info/` - Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`. 
- {% if email %}- Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameter's are used when running the pipeline. {% endif %}
+ {%- if email %}
+ - Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameters are used when running the pipeline.{% endif %}
 - Reformatted samplesheet files used as input to the pipeline: `samplesheet.valid.csv`.
 - Parameters used by the pipeline run: `params.json`.
diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md
index ae2761797a..16e6220aaf 100644
--- a/nf_core/pipeline-template/docs/usage.md
+++ b/nf_core/pipeline-template/docs/usage.md
@@ -79,9 +79,8 @@ If you wish to repeatedly use the same parameters for multiple runs, rather than
 Pipeline settings can be provided in a `yaml` or `json` file via `-params-file <file>`.
-:::warning
-Do not use `-c <file>` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args).
-:::
+> [!WARNING]
+> Do not use `-c <file>` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args).
 The above pipeline run specified with a params file in yaml format:
@@ -110,23 +109,21 @@ nextflow pull {{ name }}
 ### Reproducibility
-It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since.
+It is a good idea to specify the pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since.
 First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag.
 This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. {% if multiqc %}For example, at the bottom of the MultiQC reports.{% endif %}
-To further assist in reproducbility, you can use share and re-use [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter.
+To further assist in reproducibility, you can share and reuse [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter.
-:::tip
-If you wish to share such profile (such as upload as supplementary material for academic publications), make sure to NOT include cluster specific paths to files, nor institutional specific profiles.
-:::
+> [!TIP]
+> If you wish to share such a profile (e.g. to upload it as supplementary material for academic publications), make sure NOT to include cluster-specific paths to files or institution-specific profiles.
 ## Core Nextflow arguments
-:::note
-These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen).
-:::
+> [!NOTE]
+> These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen).
 ### `-profile`
@@ -134,19 +131,18 @@ Use this parameter to choose a configuration profile. Profiles can give configur
 Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Apptainer, Conda) - see below.
-:::info
-We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported.
-:::
+> [!IMPORTANT]
+> We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported.
 {%- if nf_core_configs %}
-The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to see if your system is available in these configs please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation).
+The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to check if your system is supported, please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation).
 {% else %}
 {% endif %}
 Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! They are loaded in sequence, so later profiles can overwrite earlier profiles.
-If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended, since it can lead to different results on different machines dependent on the computer enviroment.
+If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended, since it can lead to different results on different machines dependent on the computer environment.
 {%- if test_config %}
@@ -185,13 +181,13 @@ Specify the path to a specific config file (this is a core Nextflow command). Se
 ### Resource requests
-Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests.
Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped.
+Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the pipeline steps, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher resource requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped.
 To change the resource requests, please see the [max resources](https://nf-co.re/docs/usage/configuration#max-resources) and [tuning workflow resources](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources) section of the nf-core website.
 ### Custom Containers
-In some cases you may wish to change which container or conda environment a step of the pipeline uses for a particular tool. By default nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However in some cases the pipeline specified version maybe out of date.
+In some cases, you may wish to change the container or conda environment used by a pipeline step for a particular tool. By default, nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However, in some cases the pipeline-specified version may be out of date.
 To use a different container from the default container or conda environment specified in a pipeline, please see the [updating tool versions](https://nf-co.re/docs/usage/configuration#updating-tool-versions) section of the nf-core website.
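To make the resource-tuning and custom-container guidance in the usage docs above concrete, a custom config passed with `-c` could look like the following minimal sketch. The process name `FASTQC`, the resource values, and the container tag are illustrative assumptions, not taken from this diff:

```groovy
// custom.config -- supplied at runtime with: nextflow run <pipeline> -c custom.config
// Use -c for resources, containers and module args only -- never for pipeline parameters.
process {
    withName: 'FASTQC' {      // selector must match the process name in the workflow
        cpus   = 8            // raise the CPU request for this step
        memory = 24.GB        // raise the memory request
        // Pin a different tool version (hypothetical tag; check biocontainers for real ones)
        container = 'biocontainers/fastqc:0.12.1--hdfd78af_0'
    }
}
```

Note that the `resourceLimits` map introduced above in `conf/test.config` caps whatever such closures request, which is the mechanism that replaces the removed `check_max()` pattern in this diff.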
diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 6516ebf90e..70bdc274e2 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -84,7 +84,8 @@ workflow { params.outdir, params.input ) - {% endif %} + {%- endif %} + // // WORKFLOW: Run main workflow // @@ -108,8 +109,10 @@ workflow { {%- endif %} params.outdir, params.monochrome_logs, - {% if adaptivecard or slackreport %}params.hook_url,{% endif %} - {% if multiqc %}{{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report{% endif %} + {%- if adaptivecard or slackreport %} + params.hook_url,{% endif %} + {%- if multiqc %} + {{ prefix_nodash|upper }}_{{ short_name|upper }}.out.multiqc_report{% endif %} ) {%- endif %} } diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 367202155a..90c5728d9a 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -8,12 +8,12 @@ {%- if fastqc %} "fastqc": { "branch": "master", - "git_sha": "285a50500f9e02578d90b3ce6382ea3c30216acd", + "git_sha": "dc94b6ee04a05ddb9f7ae050712ff30a13149164", "installed_by": ["modules"] }{% endif %}{%- if multiqc %}{% if fastqc %},{% endif %} "multiqc": { "branch": "master", - "git_sha": "b7ebe95761cd389603f9cc0e0dc384c0f663815a", + "git_sha": "cf17ca47590cc578dfb47db1c2a44ef86f89976d", "installed_by": ["modules"] } {%- endif %} @@ -23,17 +23,17 @@ "nf-core": { "utils_nextflow_pipeline": { "branch": "master", - "git_sha": "d20fb2a9cc3e2835e9d067d1046a63252eb17352", + "git_sha": "c2b22d85f30a706a3073387f30380704fcae013b", "installed_by": ["subworkflows"] }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "2fdce49d30c0254f76bc0f13c55c17455c1251ab", + "git_sha": "9a1e8bb6a5d205cf7807dcefca872a3314b2f3e6", "installed_by": ["subworkflows"] }{% if nf_schema %}, "utils_nfschema_plugin": { "branch": "master", - "git_sha": "bbd5a41f4535a8defafe6080e00ea74c45f4f96c", + "git_sha": "2fd2cd6d0e7b273747f32e465fdc6bcc3ae0814e", "installed_by": ["subworkflows"] }{% endif %} } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml index 0d5be45f26..691d4c7638 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/environment.yml @@ -1,4 +1,3 @@ -name: fastqc channels: - conda-forge - bioconda diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf index d79f1c862d..752c3a10c6 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf @@ -24,9 +24,12 @@ process FASTQC { // Make list of old name and new name pairs to use for renaming in the bash while loop def old_new_pairs = reads instanceof Path || reads.size() == 1 ? 
[[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] } def rename_to = old_new_pairs*.join(' ').join(' ') - def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') + def renamed_files = old_new_pairs.collect{ _old_name, new_name -> new_name }.join(' ') - def memory_in_mb = MemoryUnit.of("${task.memory}").toUnit('MB') + // The total amount of allocated RAM by FastQC is equal to the number of threads defined (--threads) time the amount of RAM defined (--memory) + // https://github.com/s-andrews/FastQC/blob/1faeea0412093224d7f6a07f777fad60a5650795/fastqc#L211-L222 + // Dividing the task.memory by task.cpu allows to stick to requested amount of RAM in the label + def memory_in_mb = MemoryUnit.of("${task.memory}").toUnit('MB') / task.cpus // FastQC memory value allowed range (100 - 10000) def fastqc_memory = memory_in_mb > 10000 ? 10000 : (memory_in_mb < 100 ? 100 : memory_in_mb) diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml index ee5507e06b..2b2e62b8ae 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml @@ -11,40 +11,50 @@ tools: FastQC gives general quality metrics about your reads. It provides information about the quality score distribution across your reads, the per base sequence content (%A/C/G/T). + You get information about adapter contamination and other overrepresented sequences. homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ licence: ["GPL-2.0-only"] + identifier: biotools:fastqc input: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - - reads: - type: file - description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, - respectively. + - - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. output: - - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] - html: - type: file - description: FastQC report - pattern: "*_{fastqc.html}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - "*.html": + type: file + description: FastQC report + pattern: "*_{fastqc.html}" - zip: - type: file - description: FastQC report archive - pattern: "*_{fastqc.zip}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - "*.zip": + type: file + description: FastQC report archive + pattern: "*_{fastqc.zip}" - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - versions.yml: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@drpatelh" - "@grst" diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test index 70edae4d99..e9d79a074e 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test @@ -23,17 +23,14 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. - // looks like this:
Mon 2 Oct 2023
test.gz
- // https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_single") } + { assert process.success }, + // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. + // looks like this:
Mon 2 Oct 2023
test.gz
+ // https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -54,16 +51,14 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, - { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, - { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, - { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, - { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_paired") } + { assert process.success }, + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -83,13 +78,11 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_interleaved") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -109,13 +102,11 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_bam") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -138,22 +129,20 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, - { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, - { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, - { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, - { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, - { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, - { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, - { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, - { assert 
path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][2]).text.contains("File typeConventional base calls") }, - { assert path(process.out.html[0][1][3]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_multiple") } + { assert process.success }, + { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, + { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, + { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, + { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, + { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, + { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, + { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, + { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, + { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][2]).text.contains("File typeConventional base calls") }, + { assert path(process.out.html[0][1][3]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } @@ -173,21 +162,18 @@ nextflow_process { then { assertAll ( - { assert process.success }, - - { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, - { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, - { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, - - { assert snapshot(process.out.versions).match("fastqc_versions_custom_prefix") } + { assert process.success }, + { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, + { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, + { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, + { assert snapshot(process.out.versions).match() } ) } } test("sarscov2 single-end [fastq] - stub") { - options "-stub" - + options "-stub" when { process { """ @@ -201,12 +187,123 @@ nextflow_process { then { assertAll ( - { assert process.success }, - { assert snapshot(process.out.html.collect { file(it[1]).getName() } + - process.out.zip.collect { file(it[1]).getName() } + - process.out.versions ).match("fastqc_stub") } + { assert process.success }, + { assert snapshot(process.out).match() } ) } } + test("sarscov2 paired-end [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 interleaved [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_interleaved.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert 
snapshot(process.out).match() } + ) + } + } + + test("sarscov2 paired-end [bam] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 multiple [fastq] - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [id: 'test', single_end: false], // meta map + [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_1.fastq.gz', checkIfExists: true), + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_2.fastq.gz', checkIfExists: true) ] + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } + + test("sarscov2 custom_prefix - stub") { + + options "-stub" + when { + process { + """ + input[0] = Channel.of([ + [ id:'mysample', single_end:true ], // meta map + file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) + ]) + """ + } + } + + then { + assertAll ( + { assert process.success }, + { assert snapshot(process.out).match() } + ) + } + } } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap index 86f7c31154..d5db3092fb 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/tests/main.nf.test.snap @@ -1,88 +1,392 @@ { - "fastqc_versions_interleaved": { + "sarscov2 custom_prefix": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:07.293713" + "timestamp": "2024-07-22T11:02:16.374038" }, - "fastqc_stub": { + "sarscov2 single-end [fastq] - stub": { "content": [ - [ - "test.html", - "test.zip", - "versions.yml:md5,e1cc25ca8af856014824abd842e93978" - ] + { + "0": [ + [ + { + "id": "test", + "single_end": true + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": true + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": true + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": true + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:24.993809" + }, + "sarscov2 custom_prefix - stub": { + "content": [ + { + "0": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "mysample", + "single_end": true + }, + 
"mysample.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "mysample", + "single_end": true + }, + "mysample.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:31:01.425198" + "timestamp": "2024-07-22T11:03:10.93942" }, - "fastqc_versions_multiple": { + "sarscov2 interleaved [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:55.797907" + "timestamp": "2024-07-22T11:01:42.355718" }, - "fastqc_versions_bam": { + "sarscov2 paired-end [bam]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:40:26.795862" + "timestamp": "2024-07-22T11:01:53.276274" }, - "fastqc_versions_single": { + "sarscov2 multiple [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:39:27.043675" + "timestamp": "2024-07-22T11:02:05.527626" }, - "fastqc_versions_paired": { + "sarscov2 paired-end [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:01:31.188871" + }, + "sarscov2 paired-end [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:34.273566" + }, + "sarscov2 multiple [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": 
"2024-01-31T17:39:47.584191" + "timestamp": "2024-07-22T11:03:02.304411" }, - "fastqc_versions_custom_prefix": { + "sarscov2 single-end [fastq]": { "content": [ [ "versions.yml:md5,e1cc25ca8af856014824abd842e93978" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:01:19.095607" + }, + "sarscov2 interleaved [fastq] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" + }, + "timestamp": "2024-07-22T11:02:44.640184" + }, + "sarscov2 paired-end [bam] - stub": { + "content": [ + { + "0": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "1": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "2": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "html": [ + [ + { + "id": "test", + "single_end": false + }, + "test.html:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ], + "versions": [ + "versions.yml:md5,e1cc25ca8af856014824abd842e93978" + ], + "zip": [ + [ + { + "id": "test", + "single_end": false + }, + "test.zip:md5,d41d8cd98f00b204e9800998ecf8427e" + ] + ] + } + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.04.3" }, - "timestamp": "2024-01-31T17:41:14.576531" + "timestamp": "2024-07-22T11:02:53.550742" } } \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml index 329ddb4870..6f5b867b76 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml @@ -1,6 +1,5 @@ -name: multiqc channels: - conda-forge - bioconda dependencies: - - bioconda::multiqc=1.21 + - bioconda::multiqc=1.25.1 diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf index 47ac352f94..cc0643e1d5 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf @@ -3,14 +3,16 @@ process MULTIQC { conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.21--pyhdfd78af_0' : - 'biocontainers/multiqc:1.21--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.25.1--pyhdfd78af_0' : + 'biocontainers/multiqc:1.25.1--pyhdfd78af_0' }" input: path multiqc_files, stageAs: "?/*" path(multiqc_config) path(extra_multiqc_config) path(multiqc_logo) + path(replace_names) + path(sample_names) output: path "*multiqc_report.html", emit: report @@ -23,16 +25,22 @@ process MULTIQC { script: def args = task.ext.args ?: '' + def prefix = task.ext.prefix ? "--filename ${task.ext.prefix}.html" : '' def config = multiqc_config ? "--config $multiqc_config" : '' def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' - def logo = multiqc_logo ? /--cl-config 'custom_logo: "${multiqc_logo}"'/ : '' + def logo = multiqc_logo ? "--cl-config 'custom_logo: \"${multiqc_logo}\"'" : '' + def replace = replace_names ? "--replace-names ${replace_names}" : '' + def samples = sample_names ? "--sample-names ${sample_names}" : '' """ multiqc \\ --force \\ $args \\ $config \\ + $prefix \\ $extra_config \\ $logo \\ + $replace \\ + $samples \\ . cat <<-END_VERSIONS > versions.yml @@ -44,7 +52,7 @@ process MULTIQC { stub: """ mkdir multiqc_data - touch multiqc_plots + mkdir multiqc_plots touch multiqc_report.html cat <<-END_VERSIONS > versions.yml diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml index 45a9bc35e1..b16c187923 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml @@ -1,5 +1,6 @@ name: multiqc -description: Aggregate results from bioinformatics analyses across many samples into a single report +description: Aggregate results from bioinformatics analyses across many samples into + a single report keywords: - QC - bioinformatics tools @@ -12,40 +13,59 @@ tools: homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ licence: ["GPL-3.0-or-later"] + identifier: biotools:multiqc input: - - multiqc_files: - type: file - description: | - List of reports / files recognised by MultiQC, for example the html and zip output of FastQC - - multiqc_config: - type: file - description: Optional config yml for MultiQC - pattern: "*.{yml,yaml}" - - extra_multiqc_config: - type: file - description: Second optional config yml for MultiQC. Will override common sections in multiqc_config. - pattern: "*.{yml,yaml}" - - multiqc_logo: - type: file - description: Optional logo file for MultiQC - pattern: "*.{png}" + - - multiqc_files: + type: file + description: | + List of reports / files recognised by MultiQC, for example the html and zip output of FastQC + - - multiqc_config: + type: file + description: Optional config yml for MultiQC + pattern: "*.{yml,yaml}" + - - extra_multiqc_config: + type: file + description: Second optional config yml for MultiQC. Will override common sections + in multiqc_config. + pattern: "*.{yml,yaml}" + - - multiqc_logo: + type: file + description: Optional logo file for MultiQC + pattern: "*.{png}" + - - replace_names: + type: file + description: | + Optional two-column sample renaming file. First column a set of + patterns, second column a set of corresponding replacements. Passed via + MultiQC's `--replace-names` option. + pattern: "*.{tsv}" + - - sample_names: + type: file + description: | + Optional TSV file with headers, passed to the MultiQC --sample_names + argument. 
+ pattern: "*.{tsv}" output: - report: - type: file - description: MultiQC report file - pattern: "multiqc_report.html" + - "*multiqc_report.html": + type: file + description: MultiQC report file + pattern: "multiqc_report.html" - data: - type: directory - description: MultiQC data dir - pattern: "multiqc_data" + - "*_data": + type: directory + description: MultiQC data dir + pattern: "multiqc_data" - plots: - type: file - description: Plots created by MultiQC - pattern: "*_data" + - "*_plots": + type: file + description: Plots created by MultiQC + pattern: "*_data" - versions: - type: file - description: File containing software versions - pattern: "versions.yml" + - versions.yml: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@abhi18av" - "@bunop" diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test index f1c4242ef2..33316a7ddb 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test @@ -8,6 +8,8 @@ nextflow_process { tag "modules_nfcore" tag "multiqc" + config "./nextflow.config" + test("sarscov2 single-end [fastqc]") { when { @@ -17,6 +19,8 @@ nextflow_process { input[1] = [] input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } @@ -41,6 +45,8 @@ nextflow_process { input[1] = Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true)) input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } @@ -66,6 +72,8 @@ nextflow_process { input[1] = [] input[2] = [] input[3] = [] + input[4] = [] + input[5] = [] """ } } diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap index bfebd80298..2fcbb5ff7d 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap @@ -2,14 +2,14 @@ "multiqc_versions_single": { "content": [ [ - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,41f391dcedce7f93ca188f3a3ffa0916" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.4" }, - "timestamp": "2024-02-29T08:48:55.657331" + "timestamp": "2024-10-02T17:51:46.317523" }, "multiqc_stub": { "content": [ @@ -17,25 +17,25 @@ "multiqc_report.html", "multiqc_data", "multiqc_plots", - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,41f391dcedce7f93ca188f3a3ffa0916" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.4" }, - "timestamp": "2024-02-29T08:49:49.071937" + "timestamp": "2024-10-02T17:52:20.680978" }, "multiqc_versions_config": { "content": [ [ - "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" + "versions.yml:md5,41f391dcedce7f93ca188f3a3ffa0916" ] ], "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" + "nf-test": "0.9.0", + "nextflow": "24.04.4" }, - "timestamp": "2024-02-29T08:49:25.457567" + "timestamp": "2024-10-02T17:52:09.185842" } } \ No newline at end of file diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config new file mode 100644 index 0000000000..c537a6a3e7 --- /dev/null +++ 
b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/nextflow.config @@ -0,0 +1,5 @@ +process { + withName: 'MULTIQC' { + ext.prefix = null + } +} diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 8201916b3f..21174bbdc5 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -14,6 +14,7 @@ params { input = null {%- if igenomes %} + // References genome = null igenomes_base = 's3://ngi-igenomes/igenomes/' @@ -21,31 +22,39 @@ params { {%- endif %} {%- if multiqc %} + // MultiQC options multiqc_config = null multiqc_title = null multiqc_logo = null max_multiqc_email_size = '25.MB' - {% if citations %}multiqc_methods_description = null{% endif %} + {%- if citations %} + multiqc_methods_description = null{% endif %} {%- endif %} // Boilerplate options outdir = null - {% if modules %}publish_dir_mode = 'copy'{% endif %} + {%- if modules %} + publish_dir_mode = 'copy'{% endif %} {%- if email %} email = null email_on_fail = null plaintext_email = false {%- endif %} - {% if modules %}monochrome_logs = false{% endif %} - {% if slackreport or adaptivecard %}hook_url = null{% endif %} - {% if nf_schema %}help = false + {%- if modules or nf_schema %} + monochrome_logs = false{% endif %} + {%- if slackreport or adaptivecard %} + hook_url = null{% endif %} + {%- if nf_schema %} + help = false help_full = false show_hidden = false{% endif %} version = false - {% if test_config %}pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'{% endif %} + {%- if test_config %} + pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'{% endif %} + trace_report_suffix = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') - {%- if nf_core_configs %} + {%- if nf_core_configs -%} // Config options config_profile_name = null config_profile_description = null @@ -56,47 +65,34 @@ params { config_profile_url = null {%- endif %} - // Max resource options - // Defaults only, expecting to be overwritten - max_memory = '128.GB' - max_cpus = 16 - max_time = '240.h' - {%- if nf_schema %} + // Schema validation default options validate_params = true - {% endif %} + {%- endif %} } -{% if modules %} + +{% if modules -%} // Load base.config by default for all pipelines includeConfig 'conf/base.config' {%- else %} + +{% if modules -%} +// Load modules.config for DSL2 module specific options +includeConfig 'conf/modules.config' +{%- endif %} + process { // TODO nf-core: Check the defaults for all processes - cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 6.GB * task.attempt, 'memory' ) } - time = { check_max( 4.h * task.attempt, 'time' ) } + cpus = { 1 * task.attempt } + memory = { 6.GB * task.attempt } + time = { 4.h * task.attempt } errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } maxRetries = 1 maxErrors = '-1' } -{% endif %} -{% if nf_core_configs -%} -// Load nf-core custom profiles from different Institutions -try { - includeConfig "${params.custom_config_base}/nfcore_custom.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") -} - -// Load {{ name }} custom profiles from different institutions. 
-try { - includeConfig "${params.custom_config_base}/pipeline/{{ short_name }}.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config/{{ short_name }} profiles: ${params.custom_config_base}/pipeline/{{ short_name }}.config") -} -{% endif -%} +{%- endif %} profiles { debug { @@ -197,6 +193,13 @@ profiles { executor.name = 'local' executor.cpus = 4 executor.memory = 8.GB + process { + resourceLimits = [ + memory: 8.GB, + cpus : 4, + time : 1.h + ] + } } {%- endif %} {%- if test_config %} @@ -205,6 +208,15 @@ profiles { {%- endif %} } +{% if nf_core_configs -%} +// Load nf-core custom profiles from different Institutions +includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null" + +// Load {{ name }} custom profiles from different institutions. +// TODO nf-core: Optionally, you can add a pipeline-specific nf-core config at https://github.com/nf-core/configs +// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/pipeline/{{ short_name }}.config" : "/dev/null" +{%- endif %} + // Set default registry for Apptainer, Docker, Podman, Charliecloud and Singularity independent of -profile // Will not be used unless Apptainer / Docker / Podman / Charliecloud / Singularity are enabled // Set to your registry if you have a mirror of containers @@ -216,12 +228,8 @@ charliecloud.registry = 'quay.io' {% if igenomes -%} // Load igenomes.config if required -if (!params.igenomes_ignore) { - includeConfig 'conf/igenomes.config' -} else { - params.genomes = [:] -} -{% endif -%} +includeConfig !params.igenomes_ignore ? 'conf/igenomes.config' : 'conf/igenomes_ignored.config' +{%- endif %} // Export these variables to prevent local Python/R libraries from conflicting with those in the container // The JULIA depot path has been adjusted to a fixed path `/usr/local/share/julia` that needs to be used for packages in the container. @@ -247,31 +255,43 @@ set -C # No clobber - prevent output redirection from overwriting files. // Disable process selector warnings by default. Use debug profile to enable warnings. nextflow.enable.configProcessNamesValidation = false -def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') timeline { enabled = true - file = "${params.outdir}/pipeline_info/execution_timeline_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_timeline_${params.trace_report_suffix}.html" } report { enabled = true - file = "${params.outdir}/pipeline_info/execution_report_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_report_${params.trace_report_suffix}.html" } trace { enabled = true - file = "${params.outdir}/pipeline_info/execution_trace_${trace_timestamp}.txt" + file = "${params.outdir}/pipeline_info/execution_trace_${params.trace_report_suffix}.txt" } dag { enabled = true - file = "${params.outdir}/pipeline_info/pipeline_dag_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/pipeline_dag_${params.trace_report_suffix}.html" } manifest { name = '{{ name }}' - author = """{{ author }}""" + author = """{{ author }}""" // The author field is deprecated from Nextflow version 24.10.0, use contributors instead + contributors = [ + // TODO nf-core: Update the field with the details of the contributors to your pipeline. 
New with Nextflow version 24.10.0 + {%- for author_name in author.split(",") %} + [ + name: '{{ author_name }}', + affiliation: '', + email: '', + github: '', + contribution: [], // List of contribution types ('author', 'maintainer' or 'contributor') + orcid: '' + ], + {%- endfor %} + ] homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' - nextflowVersion = '!>=23.10.0' + nextflowVersion = '!>=24.04.2' version = '{{ version }}' doi = '' } @@ -279,17 +299,18 @@ manifest { {% if nf_schema -%} // Nextflow plugins plugins { - id 'nf-schema@2.1.0' // Validation of pipeline parameters and creation of an input channel from a sample sheet + id 'nf-schema@2.1.1' // Validation of pipeline parameters and creation of an input channel from a sample sheet } validation { - defaultIgnoreParams = ["genomes", "helpFull", "showHidden", "help-full", "show-hidden"] // The last 4 parameters are here because of a bug in nf-schema. This will be fixed in a later version + defaultIgnoreParams = ["genomes"] + monochromeLogs = params.monochrome_logs help { enabled = true - command = "nextflow run $manifest.name -profile --input samplesheet.csv --outdir " + command = "nextflow run {{ name }} -profile --input samplesheet.csv --outdir " fullParameter = "help_full" showHiddenParameter = "show_hidden" - {%- if is_nfcore %} + {% if is_nfcore -%} beforeText = """ -\033[2m----------------------------------------------------\033[0m- \033[0;32m,--.\033[0;30m/\033[0;32m,-.\033[0m @@ -297,15 +318,15 @@ validation { \033[0;34m |\\ | |__ __ / ` / \\ |__) |__ \033[0;33m} {\033[0m \033[0;34m | \\| | \\__, \\__/ | \\ |___ \033[0;32m\\`-._,-`-,\033[0m \033[0;32m`._,._,\'\033[0m -\033[0;35m ${manifest.name} ${manifest.version}\033[0m +\033[0;35m {{ name }} ${manifest.version}\033[0m -\033[2m----------------------------------------------------\033[0m- """ - afterText = """${manifest.doi ? "* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? "\n" : ""} + afterText = """${manifest.doi ? "\n* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? "\n" : ""} * The nf-core framework - https://doi.org/10.1038/s41587-020-0439-x + https://doi.org/10.1038/s41587-020-0439-x * Software dependencies - https://github.com/${manifest.name}/blob/master/CITATIONS.md + https://github.com/{{ name }}/blob/master/CITATIONS.md """{% endif %} }{% if is_nfcore %} summary { @@ -313,40 +334,4 @@ validation { afterText = validation.help.afterText }{% endif %} } -{% endif -%} -{%- if modules %} -// Load modules.config for DSL2 module specific options -includeConfig 'conf/modules.config' -{% endif %} -// Function to ensure that resource requirements don't go beyond -// a maximum limit -def check_max(obj, type) { - if (type == 'memory') { - try { - if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) - return params.max_memory as nextflow.util.MemoryUnit - else - return obj - } catch (all) { - println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'time') { - try { - if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) - return params.max_time as nextflow.util.Duration - else - return obj - } catch (all) { - println " ### ERROR ### Max time '${params.max_time}' is not valid! 
Using default value: $obj" - return obj - } - } else if (type == 'cpus') { - try { - return Math.min( obj, params.max_cpus as int ) - } catch (all) { - println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj" - return obj - } - } -} +{%- endif %} diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 446da25aed..3e59a8ba54 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -136,41 +136,6 @@ } }, {%- endif %} - "max_job_request_options": { - "title": "Max job request options", - "type": "object", - "fa_icon": "fab fa-acquisitions-incorporated", - "description": "Set the top limit for requested resources for any single job.", - "help_text": "If you are running on a smaller system, a pipeline step requesting more resources than are available may cause the Nextflow to stop the run with an error. These options allow you to cap the maximum resources requested by any single job so that the pipeline will run on your system.\n\nNote that you can not _increase_ the resources requested by any job using these options. For that you will need your own configuration file. See [the nf-core website](https://nf-co.re/usage/configuration) for details.", - "properties": { - "max_cpus": { - "type": "integer", - "description": "Maximum number of CPUs that can be requested for any single job.", - "default": 16, - "fa_icon": "fas fa-microchip", - "hidden": true, - "help_text": "Use to set an upper-limit for the CPU requirement for each process. Should be an integer e.g. `--max_cpus 1`" - }, - "max_memory": { - "type": "string", - "description": "Maximum amount of memory that can be requested for any single job.", - "default": "128.GB", - "fa_icon": "fas fa-memory", - "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$", - "hidden": true, - "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`" - }, - "max_time": { - "type": "string", - "description": "Maximum amount of time that can be requested for any single job.", - "default": "240.h", - "fa_icon": "far fa-clock", - "pattern": "^(\\d+\\.?\\s*(s|m|h|d|day)\\s*)+$", - "hidden": true, - "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. `--max_time '2.h'`" - } - } - }, "generic_options": { "title": "Generic options", "type": "object", @@ -217,7 +182,7 @@ "fa_icon": "fas fa-file-upload", "hidden": true },{% endif %} - {%- if modules %} + {%- if modules or nf_schema %} "monochrome_logs": { "type": "boolean", "description": "Do not use coloured log outputs.", @@ -264,7 +229,13 @@ "description": "Base URL or local path to location of pipeline test dataset files", "default": "https://raw.githubusercontent.com/nf-core/test-datasets/", "hidden": true - }{% endif %} + }{% endif %}, + "trace_report_suffix": { + "type": "string", + "fa_icon": "far calendar", + "description": "Suffix to add to the trace report filename. 
Default is the date and time in the format yyyy-MM-dd_HH-mm-ss.", + "hidden": true + } } } }, @@ -278,9 +249,6 @@ {% if nf_core_configs %}{ "$ref": "#/$defs/institutional_config_options" },{% endif %} - { - "$ref": "#/$defs/max_job_request_options" - }, { "$ref": "#/$defs/generic_options" } diff --git a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf index ae66f3674c..be5776b836 100644 --- a/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/local/utils_nfcore_pipeline_pipeline/main.nf @@ -22,9 +22,9 @@ include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline' include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SUBWORKFLOW TO INITIALISE PIPELINE -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ workflow PIPELINE_INITIALISATION { @@ -51,7 +51,8 @@ workflow PIPELINE_INITIALISATION { workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1 ) - {% if nf_schema %} + {%- if nf_schema %} + // // Validate parameters and generate parameter summary to stdout // @@ -60,7 +61,7 @@ workflow PIPELINE_INITIALISATION { validate_params, null ) - {% endif %} + {%- endif %} // // Check config provided to the pipeline @@ -70,6 +71,7 @@ workflow PIPELINE_INITIALISATION { ) {%- if igenomes %} + // // Custom validation for pipeline parameters // @@ -96,8 +98,8 @@ workflow PIPELINE_INITIALISATION { } } .groupTuple() - .map { - validateInputSamplesheet(it) + .map { samplesheet -> + validateInputSamplesheet(samplesheet) } .map { meta, fastqs -> @@ -111,9 +113,9 @@ workflow PIPELINE_INITIALISATION { } /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SUBWORKFLOW FOR PIPELINE COMPLETION -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ workflow PIPELINE_COMPLETION { @@ -123,11 +125,13 @@ workflow PIPELINE_COMPLETION { email // string: email address email_on_fail // string: email address sent on pipeline failure plaintext_email // boolean: Send plain-text email instead of HTML - {% endif %} + {%- endif %} outdir // path: Path to output directory where results will be published monochrome_logs // boolean: Disable ANSI colour codes in log output - {% if adaptivecard or slackreport %}hook_url // string: hook URL for notifications{% endif %} - {% if multiqc %}multiqc_report // string: Path to MultiQC report{% endif %} + {%- if adaptivecard or slackreport %} + hook_url // string: hook URL for notifications{% endif %} + {%- if multiqc %} + multiqc_report // string: Path to MultiQC report{% endif %} main: {%- if nf_schema %} @@ -169,9 +173,9 @@ workflow PIPELINE_COMPLETION { } /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FUNCTIONS 
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */

{%- if igenomes %}
@@ -190,7 +194,7 @@ def validateInputSamplesheet(input) {
     def (metas, fastqs) = input[1..2]

     // Check that multiple runs of the same sample are of the same datatype i.e. single-end / paired-end
-    def endedness_ok = metas.collect{ it.single_end }.unique().size == 1
+    def endedness_ok = metas.collect{ meta -> meta.single_end }.unique().size == 1
     if (!endedness_ok) {
         error("Please check input samplesheet -> Multiple runs of a sample must be of the same datatype i.e. single-end or paired-end: ${metas[0].id}")
     }
@@ -235,8 +239,10 @@ def toolCitationText() {
     // Uncomment function in methodsDescriptionText to render in MultiQC report
     def citation_text = [
             "Tools used in the workflow included:",
-            {% if fastqc %}"FastQC (Andrews 2010),",{% endif %}
-            {% if multiqc %}"MultiQC (Ewels et al. 2016)",{% endif %}
+            {%- if fastqc %}
+            "FastQC (Andrews 2010),",{% endif %}
+            {%- if multiqc %}
+            "MultiQC (Ewels et al. 2016)",{% endif %}
             "."
         ].join(' ').trim()

@@ -248,15 +254,17 @@ def toolBibliographyText() {
     // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "<li>Author (2023) Pub name, Journal, DOI</li>" : "",
     // Uncomment function in methodsDescriptionText to render in MultiQC report
     def reference_text = [
-            {% if fastqc %}"<li>Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).</li>",{% endif %}
-            {% if multiqc %}"<li>Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354</li>"{% endif %}
+            {%- if fastqc %}
+            "<li>Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).</li>",{% endif %}
+            {%- if multiqc %}
+            "<li>Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354</li>"{% endif %}
         ].join(' ').trim()

     return reference_text
 }

 def methodsDescriptionText(mqc_methods_yaml) {
-    // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file
+    // Convert to a named map so can be used as with familiar NXF ${workflow} variable syntax in the MultiQC YML file
     def meta = [:]
     meta.workflow = workflow.toMap()
     meta["manifest_map"] = workflow.manifest.toMap()
@@ -267,8 +275,10 @@ def methodsDescriptionText(mqc_methods_yaml) {
         // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers
         // Removing ` ` since the manifest.doi is a string and not a proper list
         def temp_doi_ref = ""
-        String[] manifest_doi = meta.manifest_map.doi.tokenize(",")
-        for (String doi_ref: manifest_doi) temp_doi_ref += "(doi: ${doi_ref.replace("https://doi.org/", "").replace(" ", "")}), "
+        def manifest_doi = meta.manifest_map.doi.tokenize(",")
+        manifest_doi.each { doi_ref ->
+            temp_doi_ref += "(doi: ${doi_ref.replace("https://doi.org/", "").replace(" ", "")}), "
+        }
         meta["doi_text"] = temp_doi_ref.substring(0, temp_doi_ref.length() - 2)
     } else meta["doi_text"] = ""
     meta["nodoi_text"] = meta.manifest_map.doi ? "" : "<li>If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used.</li>"
diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
index 28e32b200e..d6e593e852 100644
--- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
+++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
@@ -3,13 +3,12 @@
 //

 /*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     SUBWORKFLOW DEFINITION
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */

 workflow UTILS_NEXTFLOW_PIPELINE {
-
     take:
     print_version      // boolean: print version
     dump_parameters    // boolean: dump parameters
@@ -22,7 +21,7 @@ workflow UTILS_NEXTFLOW_PIPELINE {
     // Print workflow version and exit on --version
     //
     if (print_version) {
-        log.info "${workflow.manifest.name} ${getWorkflowVersion()}"
+        log.info("${workflow.manifest.name} ${getWorkflowVersion()}")
         System.exit(0)
     }

@@ -45,9 +44,9 @@ workflow UTILS_NEXTFLOW_PIPELINE {
 }

 /*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     FUNCTIONS
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */

 //
@@ -72,11 +71,11 @@ def getWorkflowVersion() {
 // Dump pipeline parameters to a JSON file
 //
 def dumpParametersToJSON(outdir) {
-    def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
-    def filename = "params_${timestamp}.json"
-    def temp_pf = new File(workflow.launchDir.toString(), ".${filename}")
-    def jsonStr = groovy.json.JsonOutput.toJson(params)
-    temp_pf.text = groovy.json.JsonOutput.prettyPrint(jsonStr)
+    def timestamp = new java.util.Date().format('yyyy-MM-dd_HH-mm-ss')
+    def filename = "params_${timestamp}.json"
+    def temp_pf = new File(workflow.launchDir.toString(), ".${filename}")
+    def jsonStr = groovy.json.JsonOutput.toJson(params)
+    temp_pf.text = groovy.json.JsonOutput.prettyPrint(jsonStr)

     nextflow.extension.FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json")
     temp_pf.delete()
@@ -91,9 +90,16 @@ def checkCondaChannels() {
     try {
         def config = parser.load("conda config --show channels".execute().text)
         channels = config.channels
-    } catch(NullPointerException | IOException e) {
-        log.warn "Could not verify conda channel configuration."
- return + } + catch (NullPointerException e) { + log.debug(e) + log.warn("Could not verify conda channel configuration.") + return null + } + catch (IOException e) { + log.debug(e) + log.warn("Could not verify conda channel configuration.") + return null } // Check that all channels are present @@ -102,23 +108,19 @@ def checkCondaChannels() { def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean // Check that they are in the right order - def channel_priority_violation = false - - required_channels_in_order.eachWithIndex { channel, index -> - if (index < required_channels_in_order.size() - 1) { - channel_priority_violation |= !(channels.indexOf(channel) < channels.indexOf(required_channels_in_order[index+1])) - } - } + def channel_priority_violation = required_channels_in_order != channels.findAll { ch -> ch in required_channels_in_order } if (channels_missing | channel_priority_violation) { - log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + - " There is a problem with your Conda configuration!\n\n" + - " You will need to set-up the conda-forge and bioconda channels correctly.\n" + - " Please refer to https://bioconda.github.io/\n" + - " The observed channel order is \n" + - " ${channels}\n" + - " but the following channel order is required:\n" + - " ${required_channels_in_order}\n" + - "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + log.warn """\ + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + There is a problem with your Conda configuration! + You will need to set-up the conda-forge and bioconda channels correctly. + Please refer to https://bioconda.github.io/ + The observed channel order is + ${channels} + but the following channel order is required: + ${required_channels_in_order} + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + """.stripIndent(true) } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test index ca964ce8e1..02dbf094cd 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test @@ -52,10 +52,12 @@ nextflow_workflow { } then { - assertAll( - { assert workflow.success }, - { assert workflow.stdout.contains("nextflow_workflow v9.9.9") } - ) + expect { + with(workflow) { + assert success + assert "nextflow_workflow v9.9.9" in stdout + } + } } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index cbd8495bb6..228dbff897 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -3,13 +3,12 @@ // /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SUBWORKFLOW DEFINITION -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ workflow UTILS_NFCORE_PIPELINE { - take: nextflow_cli_args 
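
The rewritten channel-priority check in `checkCondaChannels()` above collapses the old `eachWithIndex` loop into a single comparison: filter the observed channel list down to just the required channels (assumed here to be `conda-forge` and `bioconda`) and compare the result against the required order. A minimal sketch of the same idea in Python — the helper name and inputs are illustrative only, not part of nf-core/tools:

    # Sketch of the ordering check used by checkCondaChannels() above.
    # A priority violation exists when the required channels, read in the
    # order they appear in the observed config, differ from the required order.
    def channel_priority_violation(observed, required=("conda-forge", "bioconda")):
        observed_required = [ch for ch in observed if ch in required]
        return observed_required != list(required)

    assert not channel_priority_violation(["conda-forge", "bioconda", "defaults"])
    assert channel_priority_violation(["bioconda", "conda-forge"])  # wrong order

Note that a missing required channel also trips this comparison, which is consistent with the subworkflow reporting `channels_missing | channel_priority_violation` as a single warning.
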
@@ -22,9 +21,9 @@ workflow UTILS_NFCORE_PIPELINE { } /* -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FUNCTIONS -======================================================================================== +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ // @@ -33,12 +32,9 @@ workflow UTILS_NFCORE_PIPELINE { def checkConfigProvided() { def valid_config = true as Boolean if (workflow.profile == 'standard' && workflow.configFiles.size() <= 1) { - log.warn "[$workflow.manifest.name] You are attempting to run the pipeline without any custom configuration!\n\n" + - "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + - " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + - " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + - " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + - "Please refer to the quick start section and usage docs for the pipeline.\n " + log.warn( + "[${workflow.manifest.name}] You are attempting to run the pipeline without any custom configuration!\n\n" + "This will be dependent on your local compute environment but can be achieved via one or more of the following:\n" + " (1) Using an existing pipeline profile e.g. `-profile docker` or `-profile singularity`\n" + " (2) Using an existing nf-core/configs for your Institution e.g. `-profile crick` or `-profile uppmax`\n" + " (3) Using your own local custom config e.g. `-c /path/to/your/custom.config`\n\n" + "Please refer to the quick start section and usage docs for the pipeline.\n " + ) valid_config = false } return valid_config @@ -49,36 +45,17 @@ def checkConfigProvided() { // def checkProfileProvided(nextflow_cli_args) { if (workflow.profile.endsWith(',')) { - error "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + - "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + error( + "The `-profile` option cannot end with a trailing comma, please remove it and re-run the pipeline!\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + ) } if (nextflow_cli_args[0]) { - log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + - "HINT: A common mistake is to provide multiple values separated by spaces e.g. `-profile test, docker`.\n" + log.warn( + "nf-core pipelines do not accept positional arguments. The positional argument `${nextflow_cli_args[0]}` has been detected.\n" + "HINT: A common mistake is to provide multiple values separated by spaces e.g. 
`-profile test, docker`.\n"
+        )
     }
 }

-//
-// Citation string for pipeline
-//
-def workflowCitation() {
-    def temp_doi_ref = ""
-    def manifest_doi = workflow.manifest.doi.tokenize(",")
-    // Using a loop to handle multiple DOIs
-    // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers
-    // Removing ` ` since the manifest.doi is a string and not a proper list
-    manifest_doi.each { doi_ref ->
-        temp_doi_ref += "  https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n"
-    }
-    return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" +
-        "* The pipeline\n" +
-        temp_doi_ref + "\n" +
-        "* The nf-core framework\n" +
-        "  https://doi.org/10.1038/s41587-020-0439-x\n\n" +
-        "* Software dependencies\n" +
-        "  https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md"
-}
-
 //
 // Generate workflow version string
 //
@@ -102,7 +79,7 @@ def getWorkflowVersion() {
 //
 def processVersionsFromYAML(yaml_file) {
     def yaml = new org.yaml.snakeyaml.Yaml()
-    def versions = yaml.load(yaml_file).collectEntries { k, v -> [ k.tokenize(':')[-1], v ] }
+    def versions = yaml.load(yaml_file).collectEntries { k, v -> [k.tokenize(':')[-1], v] }
     return yaml.dumpAsMap(versions).trim()
 }

@@ -112,8 +89,8 @@ def processVersionsFromYAML(yaml_file) {
 def workflowVersionToYAML() {
     return """
     Workflow:
-        $workflow.manifest.name: ${getWorkflowVersion()}
-        Nextflow: $workflow.nextflow.version
+        ${workflow.manifest.name}: ${getWorkflowVersion()}
+        Nextflow: ${workflow.nextflow.version}
     """.stripIndent().trim()
 }

@@ -121,11 +98,7 @@ def workflowVersionToYAML() {
 // Get channel of software versions used in pipeline in YAML format
 //
 def softwareVersionsToYAML(ch_versions) {
-    return ch_versions
-        .unique()
-        .map { version -> processVersionsFromYAML(version) }
-        .unique()
-        .mix(Channel.of(workflowVersionToYAML()))
+    return ch_versions.unique().map { version -> processVersionsFromYAML(version) }.unique().mix(Channel.of(workflowVersionToYAML()))
 }

 //
@@ -133,56 +106,35 @@ def softwareVersionsToYAML(ch_versions) {
 //
 def paramsSummaryMultiqc(summary_params) {
     def summary_section = ''
-    summary_params.keySet().each { group ->
-        def group_params = summary_params.get(group) // This gets the parameters of that particular group
-        if (group_params) {
-            summary_section += "    <p style=\"font-size:110%\"><b>$group</b></p>\n"
-            summary_section += "    <dl class=\"dl-horizontal\">\n"
-            group_params.keySet().sort().each { param ->
-                summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</span>'}</samp></dd>\n"
+    summary_params
+        .keySet()
+        .each { group ->
+            def group_params = summary_params.get(group)
+            // This gets the parameters of that particular group
+            if (group_params) {
+                summary_section += "    <p style=\"font-size:110%\"><b>${group}</b></p>\n"
+                summary_section += "    <dl class=\"dl-horizontal\">\n"
+                group_params
+                    .keySet()
+                    .sort()
+                    .each { param ->
+                        summary_section += "        <dt>${param}</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</span>'}</samp></dd>\n"
+                    }
+                summary_section += "    </dl>\n"
+            }
-            summary_section += "    </dl>\n"
         }
-    }
-    def yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n" as String
-    yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n"
-    yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
-    yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
-    yaml_file_text += "plot_type: 'html'\n"
-    yaml_file_text += "data: |\n"
-    yaml_file_text += "${summary_section}"
+    def yaml_file_text = "id: '${workflow.manifest.name.replace('/', '-')}-summary'\n" as String
+    yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n"
+    yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
+    yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
+    yaml_file_text += "plot_type: 'html'\n"
+    yaml_file_text += "data: |\n"
+    yaml_file_text += "${summary_section}"
     return yaml_file_text
 }

-//
-// nf-core logo
-//
-def nfCoreLogo(monochrome_logs=true) {
-    def colors = logColours(monochrome_logs) as Map
-    String.format(
-        """\n
-        ${dashedLine(monochrome_logs)}
-        ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset}
-        ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset}
-        ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset}
-        ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset}
-        ${colors.green}`._,._,\'${colors.reset}
-        ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset}
-        ${dashedLine(monochrome_logs)}
-        """.stripIndent()
-    )
-}
-
-//
-// Return dashed line
-//
-def dashedLine(monochrome_logs=true) {
-    def colors = logColours(monochrome_logs) as Map
-    return "-${colors.dim}----------------------------------------------------${colors.reset}-"
-}
-
 //
 // ANSII colours used for terminal logging
 //
@@ -199,54 +151,54 @@ def logColours(monochrome_logs=true) {
     colorcodes['hidden'] = monochrome_logs ? '' : "\033[8m"

     // Regular Colors
-    colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m"
-    colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m"
-    colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m"
-    colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m"
-    colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m"
-    colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m"
-    colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m"
-    colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m"
+    colorcodes['black'] = monochrome_logs ? '' : "\033[0;30m"
+    colorcodes['red'] = monochrome_logs ? '' : "\033[0;31m"
+    colorcodes['green'] = monochrome_logs ? '' : "\033[0;32m"
+    colorcodes['yellow'] = monochrome_logs ? '' : "\033[0;33m"
+    colorcodes['blue'] = monochrome_logs ? '' : "\033[0;34m"
+    colorcodes['purple'] = monochrome_logs ? '' : "\033[0;35m"
+    colorcodes['cyan'] = monochrome_logs ? '' : "\033[0;36m"
+    colorcodes['white'] = monochrome_logs ? '' : "\033[0;37m"

     // Bold
-    colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m"
-    colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m"
-    colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m"
-    colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m"
-    colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m"
-    colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m"
-    colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m"
-    colorcodes['bwhite'] = monochrome_logs ?
'' : "\033[1;37m" + colorcodes['bblack'] = monochrome_logs ? '' : "\033[1;30m" + colorcodes['bred'] = monochrome_logs ? '' : "\033[1;31m" + colorcodes['bgreen'] = monochrome_logs ? '' : "\033[1;32m" + colorcodes['byellow'] = monochrome_logs ? '' : "\033[1;33m" + colorcodes['bblue'] = monochrome_logs ? '' : "\033[1;34m" + colorcodes['bpurple'] = monochrome_logs ? '' : "\033[1;35m" + colorcodes['bcyan'] = monochrome_logs ? '' : "\033[1;36m" + colorcodes['bwhite'] = monochrome_logs ? '' : "\033[1;37m" // Underline - colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" - colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" - colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" - colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" - colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" - colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" - colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" - colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" + colorcodes['ublack'] = monochrome_logs ? '' : "\033[4;30m" + colorcodes['ured'] = monochrome_logs ? '' : "\033[4;31m" + colorcodes['ugreen'] = monochrome_logs ? '' : "\033[4;32m" + colorcodes['uyellow'] = monochrome_logs ? '' : "\033[4;33m" + colorcodes['ublue'] = monochrome_logs ? '' : "\033[4;34m" + colorcodes['upurple'] = monochrome_logs ? '' : "\033[4;35m" + colorcodes['ucyan'] = monochrome_logs ? '' : "\033[4;36m" + colorcodes['uwhite'] = monochrome_logs ? '' : "\033[4;37m" // High Intensity - colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" - colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" - colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" - colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" - colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" - colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" - colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" - colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" + colorcodes['iblack'] = monochrome_logs ? '' : "\033[0;90m" + colorcodes['ired'] = monochrome_logs ? '' : "\033[0;91m" + colorcodes['igreen'] = monochrome_logs ? '' : "\033[0;92m" + colorcodes['iyellow'] = monochrome_logs ? '' : "\033[0;93m" + colorcodes['iblue'] = monochrome_logs ? '' : "\033[0;94m" + colorcodes['ipurple'] = monochrome_logs ? '' : "\033[0;95m" + colorcodes['icyan'] = monochrome_logs ? '' : "\033[0;96m" + colorcodes['iwhite'] = monochrome_logs ? '' : "\033[0;97m" // Bold High Intensity - colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" - colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" - colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" - colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" - colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" - colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" - colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" - colorcodes['biwhite'] = monochrome_logs ? '' : "\033[1;97m" + colorcodes['biblack'] = monochrome_logs ? '' : "\033[1;90m" + colorcodes['bired'] = monochrome_logs ? '' : "\033[1;91m" + colorcodes['bigreen'] = monochrome_logs ? '' : "\033[1;92m" + colorcodes['biyellow'] = monochrome_logs ? '' : "\033[1;93m" + colorcodes['biblue'] = monochrome_logs ? '' : "\033[1;94m" + colorcodes['bipurple'] = monochrome_logs ? '' : "\033[1;95m" + colorcodes['bicyan'] = monochrome_logs ? '' : "\033[1;96m" + colorcodes['biwhite'] = monochrome_logs ? 
'' : "\033[1;97m" return colorcodes } @@ -261,14 +213,16 @@ def attachMultiqcReport(multiqc_report) { mqc_report = multiqc_report.getVal() if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) { if (mqc_report.size() > 1) { - log.warn "[$workflow.manifest.name] Found multiple reports from process 'MULTIQC', will use only one" + log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one") } mqc_report = mqc_report[0] } } - } catch (all) { + } + catch (Exception msg) { + log.debug(msg) if (multiqc_report) { - log.warn "[$workflow.manifest.name] Could not attach MultiQC report to summary email" + log.warn("[${workflow.manifest.name}] Could not attach MultiQC report to summary email") } } return mqc_report @@ -280,26 +234,35 @@ def attachMultiqcReport(multiqc_report) { def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true, multiqc_report=null) { // Set up the e-mail variables - def subject = "[$workflow.manifest.name] Successful: $workflow.runName" + def subject = "[${workflow.manifest.name}] Successful: ${workflow.runName}" if (!workflow.success) { - subject = "[$workflow.manifest.name] FAILED: $workflow.runName" + subject = "[${workflow.manifest.name}] FAILED: ${workflow.runName}" } def summary = [:] - summary_params.keySet().sort().each { group -> - summary << summary_params[group] - } + summary_params + .keySet() + .sort() + .each { group -> + summary << summary_params[group] + } def misc_fields = [:] misc_fields['Date Started'] = workflow.start misc_fields['Date Completed'] = workflow.complete misc_fields['Pipeline script file path'] = workflow.scriptFile misc_fields['Pipeline script hash ID'] = workflow.scriptId - if (workflow.repository) misc_fields['Pipeline repository Git URL'] = workflow.repository - if (workflow.commitId) misc_fields['Pipeline repository Git Commit'] = workflow.commitId - if (workflow.revision) misc_fields['Pipeline Git branch/tag'] = workflow.revision - misc_fields['Nextflow Version'] = workflow.nextflow.version - misc_fields['Nextflow Build'] = workflow.nextflow.build + if (workflow.repository) { + misc_fields['Pipeline repository Git URL'] = workflow.repository + } + if (workflow.commitId) { + misc_fields['Pipeline repository Git Commit'] = workflow.commitId + } + if (workflow.revision) { + misc_fields['Pipeline Git branch/tag'] = workflow.revision + } + misc_fields['Nextflow Version'] = workflow.nextflow.version + misc_fields['Nextflow Build'] = workflow.nextflow.build misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp def email_fields = [:] @@ -336,8 +299,8 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi def email_html = html_template.toString() // Render the sendmail template - def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit - def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ] + def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? 
params.max_multiqc_email_size : 0) as MemoryUnit + def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()] def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt") def sendmail_template = engine.createTemplate(sf).make(smail_fields) def sendmail_html = sendmail_template.toString() @@ -346,30 +309,34 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi def colors = logColours(monochrome_logs) as Map if (email_address) { try { - if (plaintext_email) { throw new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') } + if (plaintext_email) { +new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') } // Try to send HTML e-mail using sendmail def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html") sendmail_tf.withWriter { w -> w << sendmail_html } - [ 'sendmail', '-t' ].execute() << sendmail_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-" - } catch (all) { + ['sendmail', '-t'].execute() << sendmail_html + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-") + } + catch (Exception msg) { + log.debug(msg) + log.debug("Trying with mail instead of sendmail") // Catch failures and try with plaintext - def mail_cmd = [ 'mail', '-s', subject, '--content-type=text/html', email_address ] + def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address] mail_cmd.execute() << email_html - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (mail)-" + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (mail)-") } } // Write summary e-mail HTML to a file def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html") output_hf.withWriter { w -> w << email_html } - nextflow.extension.FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html"); + nextflow.extension.FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html") output_hf.delete() // Write summary e-mail TXT to a file def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt") output_tf.withWriter { w -> w << email_txt } - nextflow.extension.FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt"); + nextflow.extension.FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt") output_tf.delete() } @@ -380,12 +347,14 @@ def completionSummary(monochrome_logs=true) { def colors = logColours(monochrome_logs) as Map if (workflow.success) { if (workflow.stats.ignoredCount == 0) { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Pipeline completed successfully${colors.reset}-" - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.yellow} Pipeline completed successfully, but with errored process(es) ${colors.reset}-" + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Pipeline completed successfully${colors.reset}-") } - } else { - log.info "-${colors.purple}[$workflow.manifest.name]${colors.red} Pipeline completed with errors${colors.reset}-" + else { + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.yellow} Pipeline 
completed successfully, but with errored process(es) ${colors.reset}-") + } + } + else { + log.info("-${colors.purple}[${workflow.manifest.name}]${colors.red} Pipeline completed with errors${colors.reset}-") } } @@ -394,21 +363,30 @@ def completionSummary(monochrome_logs=true) { // def imNotification(summary_params, hook_url) { def summary = [:] - summary_params.keySet().sort().each { group -> - summary << summary_params[group] - } + summary_params + .keySet() + .sort() + .each { group -> + summary << summary_params[group] + } def misc_fields = [:] - misc_fields['start'] = workflow.start - misc_fields['complete'] = workflow.complete - misc_fields['scriptfile'] = workflow.scriptFile - misc_fields['scriptid'] = workflow.scriptId - if (workflow.repository) misc_fields['repository'] = workflow.repository - if (workflow.commitId) misc_fields['commitid'] = workflow.commitId - if (workflow.revision) misc_fields['revision'] = workflow.revision - misc_fields['nxf_version'] = workflow.nextflow.version - misc_fields['nxf_build'] = workflow.nextflow.build - misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp + misc_fields['start'] = workflow.start + misc_fields['complete'] = workflow.complete + misc_fields['scriptfile'] = workflow.scriptFile + misc_fields['scriptid'] = workflow.scriptId + if (workflow.repository) { + misc_fields['repository'] = workflow.repository + } + if (workflow.commitId) { + misc_fields['commitid'] = workflow.commitId + } + if (workflow.revision) { + misc_fields['revision'] = workflow.revision + } + misc_fields['nxf_version'] = workflow.nextflow.version + misc_fields['nxf_build'] = workflow.nextflow.build + misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp def msg_fields = [:] msg_fields['version'] = getWorkflowVersion() @@ -433,13 +411,13 @@ def imNotification(summary_params, hook_url) { def json_message = json_template.toString() // POST - def post = new URL(hook_url).openConnection(); + def post = new URL(hook_url).openConnection() post.setRequestMethod("POST") post.setDoOutput(true) post.setRequestProperty("Content-Type", "application/json") - post.getOutputStream().write(json_message.getBytes("UTF-8")); - def postRC = post.getResponseCode(); - if (! 
postRC.equals(200)) { - log.warn(post.getErrorStream().getText()); + post.getOutputStream().write(json_message.getBytes("UTF-8")) + def postRC = post.getResponseCode() + if (!postRC.equals(200)) { + log.warn(post.getErrorStream().getText()) } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test index 1dc317f8f7..e43d208b1b 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test @@ -41,58 +41,6 @@ nextflow_function { } } - test("Test Function workflowCitation") { - - function "workflowCitation" - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function nfCoreLogo") { - - function "nfCoreLogo" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function dashedLine") { - - function "dashedLine" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - test("Test Function without logColours") { function "logColours" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap index 1037232c9e..02c6701413 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap @@ -17,26 +17,6 @@ }, "timestamp": "2024-02-28T12:02:59.729647" }, - "Test Function nfCoreLogo": { - "content": [ - "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:10.562934" - }, - "Test Function workflowCitation": { - "content": [ - "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:07.019761" - }, "Test Function without logColours": { "content": [ { @@ -95,16 +75,6 @@ }, "timestamp": "2024-02-28T12:03:17.969323" }, - "Test Function dashedLine": { - "content": [ - "-\u001b[2m----------------------------------------------------\u001b[0m-" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:14.366181" - }, "Test Function with logColours": { "content": [ { diff 
--git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test index 842dc432af..8fb3016487 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test @@ -42,7 +42,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { @@ -94,7 +94,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 5b7e0ff143..6126f9ec69 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -5,12 +5,17 @@ */ {%- if modules %} -{% if fastqc %}include { FASTQC } from '../modules/nf-core/fastqc/main'{% endif %} -{% if multiqc %}include { MULTIQC } from '../modules/nf-core/multiqc/main'{% endif %} -{% if nf_schema %}include { paramsSummaryMap } from 'plugin/nf-schema'{% endif %} -{% if multiqc %}include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} +{%- if fastqc %} +include { FASTQC } from '../modules/nf-core/fastqc/main'{% endif %} +{%- if multiqc %} +include { MULTIQC } from '../modules/nf-core/multiqc/main'{% endif %} +{%- if nf_schema %} +include { paramsSummaryMap } from 'plugin/nf-schema'{% endif %} +{%- if multiqc %} +include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline'{% endif %} include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' -{% if citations or multiqc %}include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} +{%- if citations or multiqc %} +include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_{{ short_name }}_pipeline'{% endif %} {%- endif %} /* @@ -28,7 +33,8 @@ workflow {{ short_name|upper }} { main: ch_versions = Channel.empty() - {% if multiqc %}ch_multiqc_files = Channel.empty(){% endif %} + {%- if multiqc %} + ch_multiqc_files = Channel.empty(){% endif %} {%- if fastqc %} // @@ -47,7 +53,7 @@ workflow {{ short_name|upper }} { softwareVersionsToYAML(ch_versions) .collectFile( storeDir: "${params.outdir}/pipeline_info", - name: {% if is_nfcore %}'nf_core_' {% else %} '' {% endif %} + 'pipeline_software_' + {% if multiqc %} 'mqc_' {% else %} '' {% endif %} + 'versions.yml', + name: {% if is_nfcore %}'nf_core_' + {% endif %} '{{ short_name }}_software_' {% if multiqc %} + 'mqc_' {% endif %} + 'versions.yml', sort: true, newLine: true ).set { ch_collated_versions } @@ -65,13 +71,14 @@ workflow {{ short_name|upper }} { Channel.fromPath(params.multiqc_logo, checkIfExists: true) : Channel.empty() - {% if nf_schema %} + {%- if nf_schema %} + summary_params = paramsSummaryMap( workflow, parameters_schema: "nextflow_schema.json") ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) ch_multiqc_files = ch_multiqc_files.mix( ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) - {% endif %} + {%- endif %} {%- if citations %} ch_multiqc_custom_methods_description = params.multiqc_methods_description ? 
@@ -95,7 +102,9 @@ workflow {{ short_name|upper }} { ch_multiqc_files.collect(), ch_multiqc_config.toList(), ch_multiqc_custom_config.toList(), - ch_multiqc_logo.toList() + ch_multiqc_logo.toList(), + [], + [] ) {% endif %} emit: diff --git a/nf_core/pipelines/bump_version.py b/nf_core/pipelines/bump_version.py index 18aa869328..3190ed70d4 100644 --- a/nf_core/pipelines/bump_version.py +++ b/nf_core/pipelines/bump_version.py @@ -5,9 +5,10 @@ import logging import re from pathlib import Path -from typing import List, Tuple, Union +from typing import List, Optional, Tuple, Union import rich.console +from ruamel.yaml import YAML import nf_core.utils from nf_core.utils import Pipeline @@ -60,6 +61,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: f"/releases/tag/{new_version}", ) ], + yaml_key=["report_comment"], ) if multiqc_current_version != "dev" and multiqc_new_version == "dev": update_file_version( @@ -71,6 +73,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: "/tree/dev", ) ], + yaml_key=["report_comment"], ) if multiqc_current_version == "dev" and multiqc_new_version != "dev": update_file_version( @@ -82,6 +85,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: f"/releases/tag/{multiqc_new_version}", ) ], + yaml_key=["report_comment"], ) update_file_version( Path("assets", "multiqc_config.yml"), @@ -92,6 +96,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: f"/{multiqc_new_version}/", ), ], + yaml_key=["report_comment"], ) # nf-test snap files pipeline_name = pipeline_obj.nf_config.get("manifest.name", "").strip(" '\"") @@ -107,6 +112,20 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None: ) ], ) + # .nf-core.yml - pipeline version + # update entry: version: 1.0.0dev, but not `nf_core_version`, or `bump_version` + update_file_version( + ".nf-core.yml", + pipeline_obj, + [ + ( + current_version, + new_version, + ) + ], + required=False, + yaml_key=["template", "version"], + ) def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: @@ -147,10 +166,11 @@ def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: # example: # NXF_VER: # - "20.04.0" - rf"- \"{re.escape(current_version)}\"", - f'- "{new_version}"', + current_version, + new_version, ) ], + yaml_key=["jobs", "test", "strategy", "matrix", "NXF_VER"], ) # README.md - Nextflow version badge @@ -161,70 +181,128 @@ def bump_nextflow_version(pipeline_obj: Pipeline, new_version: str) -> None: ( rf"nextflow%20DSL2-%E2%89%A5{re.escape(current_version)}-23aa62.svg", f"nextflow%20DSL2-%E2%89%A5{new_version}-23aa62.svg", - ), - ( - # example: 1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=20.04.0`) - rf"1\.\s*Install\s*\[`Nextflow`\]\(https:\/\/www\.nextflow\.io\/docs\/latest\/getstarted\.html#installation\)\s*\(`>={re.escape(current_version)}`\)", - f"1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>={new_version}`)", - ), + ) ], ) -def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patterns: List[Tuple[str, str]]) -> None: - """Updates the version number in a requested file. +def update_file_version( + filename: Union[str, Path], + pipeline_obj: Pipeline, + patterns: List[Tuple[str, str]], + required: bool = True, + yaml_key: Optional[List[str]] = None, +) -> None: + """ + Updates a file with a new version number. 
Args: - filename (str): File to scan. - pipeline_obj (nf_core.pipelines.lint.PipelineLint): A PipelineLint object that holds information - about the pipeline contents and build files. - pattern (str): Regex pattern to apply. - - Raises: - ValueError, if the version number cannot be found. + filename (str): The name of the file to update. + pipeline_obj (nf_core.utils.Pipeline): A `Pipeline` object that holds information + about the pipeline contents. + patterns (List[Tuple[str, str]]): A list of tuples containing the regex patterns to + match and the replacement strings. + required (bool, optional): Whether the file is required to exist. Defaults to `True`. + yaml_key (Optional[List[str]], optional): The YAML key to update. Defaults to `None`. """ - # Load the file - fn = pipeline_obj._fp(filename) - content = "" - try: - with open(fn) as fh: - content = fh.read() - except FileNotFoundError: + fn: Path = pipeline_obj._fp(filename) + + if not fn.exists(): log.warning(f"File not found: '{fn}'") return - replacements = [] - for pattern in patterns: - found_match = False + if yaml_key: + update_yaml_file(fn, patterns, yaml_key, required) + else: + update_text_file(fn, patterns, required) - newcontent = [] - for line in content.splitlines(): - # Match the pattern - matches_pattern = re.findall(rf"^.*{pattern[0]}.*$", line) - if matches_pattern: - found_match = True - # Replace the match - newline = re.sub(pattern[0], pattern[1], line) - newcontent.append(newline) +def update_yaml_file(fn: Path, patterns: List[Tuple[str, str]], yaml_key: List[str], required: bool): + """ + Updates a YAML file with a new version number. - # Save for logging - replacements.append((line, newline)) + Args: + fn (Path): The name of the file to update. + patterns (List[Tuple[str, str]]): A list of tuples containing the regex patterns to + match and the replacement strings. + yaml_key (List[str]): The YAML key to update. + required (bool): Whether the file is required to exist. + """ + yaml = YAML() + yaml.preserve_quotes = True + with open(fn) as file: + yaml_content = yaml.load(file) + + try: + target = yaml_content + for key in yaml_key[:-1]: + target = target[key] - # No match, keep line as it is + last_key = yaml_key[-1] + current_value = target[last_key] + + new_value = current_value + for pattern, replacement in patterns: + # check if current value is list + if isinstance(current_value, list): + new_value = [re.sub(pattern, replacement, item) for item in current_value] else: - newcontent.append(line) + new_value = re.sub(pattern, replacement, current_value) - if found_match: - content = "\n".join(newcontent) + "\n" - else: - log.error(f"Could not find version number in {filename}: `{pattern}`") + if new_value != current_value: + target[last_key] = new_value + with open(fn, "w") as file: + yaml.dump(yaml_content, file) + log.info(f"Updated version in YAML file '{fn}'") + log_change(str(current_value), str(new_value)) + except KeyError as e: + handle_error(f"Could not find key {e} in the YAML structure of {fn}", required) - log.info(f"Updated version in '{filename}'") - for replacement in replacements: - stderr.print(f" [red] - {replacement[0].strip()}", highlight=False) - stderr.print(f" [green] + {replacement[1].strip()}", highlight=False) - stderr.print("\n") - with open(fn, "w") as fh: - fh.write(content) +def update_text_file(fn: Path, patterns: List[Tuple[str, str]], required: bool): + """ + Updates a text file with a new version number. + + Args: + fn (Path): The name of the file to update. 
+ patterns (List[Tuple[str, str]]): A list of tuples containing the regex patterns to + match and the replacement strings. + required (bool): Whether the file is required to exist. + """ + with open(fn) as file: + content = file.read() + + updated = False + for pattern, replacement in patterns: + new_content, count = re.subn(pattern, replacement, content) + if count > 0: + log_change(content, new_content) + content = new_content + updated = True + log.info(f"Updated version in '{fn}'") + log.debug(f"Replaced pattern '{pattern}' with '{replacement}' {count} times") + elif required: + handle_error(f"Could not find version number in {fn}: `{pattern}`", required) + + if updated: + with open(fn, "w") as file: + file.write(content) + + +def handle_error(message: str, required: bool): + if required: + raise ValueError(message) + else: + log.info(message) + + +def log_change(old_content: str, new_content: str): + old_lines = old_content.splitlines() + new_lines = new_content.splitlines() + + for old_line, new_line in zip(old_lines, new_lines): + if old_line != new_line: + stderr.print(f" [red] - {old_line.strip()}", highlight=False) + stderr.print(f" [green] + {new_line.strip()}", highlight=False) + + stderr.print("\n") diff --git a/nf_core/pipelines/create/__init__.py b/nf_core/pipelines/create/__init__.py index 8b0edf34cf..6a610ccccb 100644 --- a/nf_core/pipelines/create/__init__.py +++ b/nf_core/pipelines/create/__init__.py @@ -1,14 +1,11 @@ """A Textual app to create a pipeline.""" import logging -from pathlib import Path import click -import yaml from textual.app import App -from textual.widgets import Button +from textual.widgets import Button, Switch -import nf_core from nf_core.pipelines.create import utils from nf_core.pipelines.create.basicdetails import BasicDetails from nf_core.pipelines.create.custompipeline import CustomPipeline @@ -41,11 +38,12 @@ class PipelineCreateApp(App[utils.CreateConfig]): """A Textual app to manage stopwatches.""" CSS_PATH = "create.tcss" - TITLE = "nf-core create" + TITLE = "nf-core pipelines create" SUB_TITLE = "Create a new pipeline with the nf-core pipeline template" BINDINGS = [ ("d", "toggle_dark", "Toggle dark mode"), ("q", "quit", "Quit"), + ("a", "toggle_all", "Toggle all"), ] SCREENS = { "welcome": WelcomeScreen(), @@ -105,3 +103,14 @@ def on_button_pressed(self, event: Button.Pressed) -> None: def action_toggle_dark(self) -> None: """An action to toggle dark mode.""" self.dark: bool = not self.dark + + def action_toggle_all(self) -> None: + """An action to toggle all Switches.""" + switches = self.query(Switch) + if not switches: + return # No Switches widgets found + # Determine the new state based on the first switch + new_state = not switches.first().value if switches.first() else True + for switch in switches: + switch.value = new_state + self.refresh() diff --git a/nf_core/pipelines/create/basicdetails.py b/nf_core/pipelines/create/basicdetails.py index 09484fa2ea..2bd2ea1c79 100644 --- a/nf_core/pipelines/create/basicdetails.py +++ b/nf_core/pipelines/create/basicdetails.py @@ -69,7 +69,7 @@ def compose(self) -> ComposeResult: @on(Input.Submitted) def show_exists_warn(self): """Check if the pipeline exists on every input change or submitted. 
- If the pipeline exists, show warning message saying that it will be overriden.""" + If the pipeline exists, show warning message saying that it will be overridden.""" config = {} for text_input in self.query("TextInput"): this_input = text_input.query_one(Input) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 8e1d46c690..61e0b63ec3 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -8,7 +8,7 @@ import re import shutil from pathlib import Path -from typing import Dict, List, Optional, Tuple, Union, cast +from typing import Dict, List, Optional, Tuple, Union import git import git.config @@ -21,7 +21,8 @@ from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file -from nf_core.utils import LintConfigType, NFCoreTemplateConfig +from nf_core.pipelines.rocrate import ROCrate +from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlLintConfig log = logging.getLogger(__name__) @@ -67,7 +68,7 @@ def __init__( _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else Path().cwd()) # Obtain a CreateConfig object from `.nf-core.yml` config file if config_yml is not None and getattr(config_yml, "template", None) is not None: - self.config = CreateConfig(**config_yml["template"].model_dump()) + self.config = CreateConfig(**config_yml["template"].model_dump(exclude_none=True)) else: raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") # Update the output directory @@ -182,7 +183,7 @@ def update_config(self, organisation, version, force, outdir): self.config.force = force if force else False if self.config.outdir is None: self.config.outdir = outdir if outdir else "." 
- if self.config.is_nfcore is None: + if self.config.is_nfcore is None or self.config.is_nfcore == "null": self.config.is_nfcore = self.config.org == "nf-core" def obtain_jinja_params_dict( @@ -205,7 +206,7 @@ def obtain_jinja_params_dict( config_yml = None # Set the parameters for the jinja template - jinja_params = self.config.model_dump() + jinja_params = self.config.model_dump(exclude_none=True) # Add template areas to jinja params and create list of areas with paths to skip skip_areas = [] @@ -356,6 +357,11 @@ def render_template(self) -> None: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() + if self.config.skip_features is None or "ro-crate" not in self.config.skip_features: + # Create the RO-Crate metadata file + rocrate_obj = ROCrate(self.outdir) + rocrate_obj.create_rocrate(json_path=self.outdir / "ro-crate-metadata.json") + # Update the .nf-core.yml with linting configurations self.fix_linting() @@ -363,14 +369,12 @@ def render_template(self) -> None: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: - self.config.outdir = str(self.config.outdir) - config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) - yaml.safe_dump(config_yml.model_dump(), fh) + config_yml.template = NFCoreTemplateConfig(**self.config.model_dump(exclude_none=True)) + yaml.safe_dump(config_yml.model_dump(exclude_none=True), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") - run_prettier_on_file(self.outdir / config_fn) # Run prettier on files - run_prettier_on_file(self.outdir) + run_prettier_on_file([str(f) for f in self.outdir.glob("**/*")]) def fix_linting(self): """ @@ -397,11 +401,9 @@ def fix_linting(self): # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and nf_core_yml is not None: - nf_core_yml.lint = cast(LintConfigType, lint_config) + nf_core_yml.lint = NFCoreYamlLintConfig(**lint_config) with open(self.outdir / config_fn, "w") as fh: - yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) - - run_prettier_on_file(Path(self.outdir, config_fn)) + yaml.dump(nf_core_yml.model_dump(exclude_none=True), fh, default_flow_style=False, sort_keys=False) def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" diff --git a/nf_core/pipelines/create/custompipeline.py b/nf_core/pipelines/create/custompipeline.py index 5debcfee7f..e433db41ec 100644 --- a/nf_core/pipelines/create/custompipeline.py +++ b/nf_core/pipelines/create/custompipeline.py @@ -2,9 +2,9 @@ from textual import on from textual.app import ComposeResult -from textual.containers import Center, ScrollableContainer +from textual.containers import Center, Horizontal, ScrollableContainer from textual.screen import Screen -from textual.widgets import Button, Footer, Header, Markdown, Switch +from textual.widgets import Button, Footer, Header, Markdown, Static, Switch from nf_core.pipelines.create.utils import PipelineFeature @@ -22,7 +22,13 @@ def compose(self) -> ComposeResult: """ ) ) + yield Horizontal( + Switch(id="toggle_all", value=True), + Static("Toggle all features", classes="feature_title"), + classes="custom_grid", + ) yield ScrollableContainer(id="features") + yield Center( Button("Back", id="back", variant="default"), Button("Continue", id="continue", variant="success"), @@ 
-35,6 +41,7 @@ def on_mount(self) -> None: self.query_one("#features").mount( PipelineFeature(feature["help_text"], feature["short_description"], feature["description"], name) ) + self.query_one("#toggle_all", Switch).value = True @on(Button.Pressed, "#continue") def on_button_pressed(self, event: Button.Pressed) -> None: @@ -45,3 +52,10 @@ def on_button_pressed(self, event: Button.Pressed) -> None: if not this_switch.value: skip.append(this_switch.id) self.parent.TEMPLATE_CONFIG.__dict__.update({"skip_features": skip, "is_nfcore": False}) + + @on(Switch.Changed, "#toggle_all") + def on_toggle_all(self, event: Switch.Changed) -> None: + """Handle toggling all switches.""" + new_state = event.value + for feature in self.query("PipelineFeature"): + feature.query_one(Switch).value = new_state diff --git a/nf_core/pipelines/create/finaldetails.py b/nf_core/pipelines/create/finaldetails.py index bd15cf9ddd..dad81689a9 100644 --- a/nf_core/pipelines/create/finaldetails.py +++ b/nf_core/pipelines/create/finaldetails.py @@ -85,7 +85,7 @@ def on_button_pressed(self, event: Button.Pressed) -> None: @on(Input.Submitted) def show_exists_warn(self): """Check if the pipeline exists on every input change or submitted. - If the pipeline exists, show warning message saying that it will be overriden.""" + If the pipeline exists, show warning message saying that it will be overridden.""" outdir = "" for text_input in self.query("TextInput"): this_input = text_input.query_one(Input) diff --git a/nf_core/pipelines/create/githubrepo.py b/nf_core/pipelines/create/githubrepo.py index 99e7b09ab8..b37dfb6170 100644 --- a/nf_core/pipelines/create/githubrepo.py +++ b/nf_core/pipelines/create/githubrepo.py @@ -67,7 +67,7 @@ def compose(self) -> ComposeResult: yield TextInput( "repo_org", "Organisation name", - "The name of the organisation where the GitHub repo will be cretaed", + "The name of the organisation where the GitHub repo will be created", default=self.parent.TEMPLATE_CONFIG.org, classes="column", ) diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index b59a2e51a6..9841879e83 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -60,6 +60,7 @@ ci: igenomes: skippable_paths: - "conf/igenomes.config" + - "conf/igenomes_ignored.config" short_description: "Use reference genomes" description: "The pipeline will be configured to use a copy of the most common reference genome files from iGenomes" help_text: | @@ -76,6 +77,7 @@ igenomes: linting: files_exist: - "conf/igenomes.config" + - "conf/igenomes_ignored.config" nfcore_pipelines: True custom_pipelines: True github_badges: @@ -99,7 +101,7 @@ github_badges: nf_core_configs: skippable_paths: False short_description: "Add configuration files" - description: "The pipeline will include configuration profiles containing custom parameters requried to run nf-core pipelines at different institutions" + description: "The pipeline will include configuration profiles containing custom parameters required to run nf-core pipelines at different institutions" help_text: | Nf-core has a repository with a collection of configuration profiles. 
@@ -118,6 +120,7 @@ nf_core_configs: - "custom_config" - "params.custom_config_version" - "params.custom_config_base" + included_configs: False nfcore_pipelines: False custom_pipelines: True is_nfcore: @@ -183,6 +186,7 @@ code_linters: citations: skippable_paths: - "assets/methods_description_template.yml" + - "CITATIONS.md" short_description: "Include citations" description: "Include pipeline tools citations in CITATIONS.md and a method description in the MultiQC report (if enabled)." help_text: | @@ -190,6 +194,9 @@ citations: Additionally, it will include a YAML file (`assets/methods_description_template.yml`) to add a Materials & Methods section describing the tools used in the pieline, and the logics to add this section to the output MultiQC report (if the report is generated). + linting: + files_exist: + - "CITATIONS.md" nfcore_pipelines: False custom_pipelines: True gitpod: @@ -425,3 +432,27 @@ seqera_platform: You can extend this file adding any other desired configuration. nfcore_pipelines: False custom_pipelines: True +rocrate: + skippable_paths: + - "ro-crate-metadata.json" + short_description: "Add RO-Crate metadata" + description: "Add a RO-Crate metadata file to describe the pipeline" + help_text: | + RO-Crate is a metadata specification to describe research data and software. + This will add a `ro-crate-metadata.json` file to describe the pipeline. + nfcore_pipelines: False + custom_pipelines: True + linting: + files_warn: + - "ro-crate-metadata.json" +vscode: + skippable_paths: + - ".vscode" + short_description: "Render website admonitions in VSCode" + description: "Add a VSCode configuration to render website admonitions" + help_text: | + This will add a VSCode configuration file to render the admonitions in markdown files with the same style as the nf-core website. + + Adds the `.vscode` directory to the pipeline repository. + nfcore_pipelines: False + custom_pipelines: True diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 97453b127e..d37dce86d1 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -133,10 +133,8 @@ def __init__( self.force = force self.platform = platform self.fullname: Optional[str] = None - # if flag is not specified, do not assume deliberate choice and prompt config inclusion interactively. - # this implies that non-interactive "no" choice is only possible implicitly (e.g. with --platform or if prompt is suppressed by !stderr.is_interactive). - # only alternative would have been to make it a parameter with argument, e.g. -d="yes" or -d="no". - self.include_configs = True if download_configuration else False if bool(platform) else None + # downloading configs is not supported for Seqera Platform downloads. + self.include_configs = True if download_configuration == "yes" and not bool(platform) else False # Additional tags to add to the downloaded pipeline. This enables to mark particular commits or revisions with # additional tags, e.g. "stable", "testing", "validated", "production" etc. Since this requires a git-repo, it is only # available for the bare / Seqera Platform download. @@ -748,7 +746,7 @@ def find_container_images(self, workflow_directory: str) -> None: self.nf_config is needed, because we need to restart search over raw input if no proper container matches are found.
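The first change in the next hunk replaces two chained `strip` calls with a single character-set strip; a quick toy illustration (values are made up, not nf-core code):

```python
# str.strip with a character set removes any mix of quotes from both ends in
# one pass; the chained form only handles one nesting order.
value = "'\"1.25\"'"
print(value.strip('"').strip("'"))  # "1.25"  (inner double quotes survive)
print(value.strip("'\""))  # 1.25
```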
""" - config_findings.append((k, v.strip('"').strip("'"), self.nf_config, "Nextflow configs")) + config_findings.append((k, v.strip("'\""), self.nf_config, "Nextflow configs")) # rectify the container paths found in the config # Raw config_findings may yield multiple containers, so better create a shallow copy of the list, since length of input and output may be different ?!? @@ -841,11 +839,12 @@ def rectify_raw_container_matches(self, raw_findings): url_regex = ( r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" ) + oras_regex = r"oras:\/\/[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" # Thanks Stack Overflow for the regex: https://stackoverflow.com/a/39672069/713980 docker_regex = r"^(?:(?=[^:\/]{1,253})(?!-)[a-zA-Z0-9-]{1,63}(? List[str]: """ Helper function that takes a list of container images (URLs and Docker URIs), eliminates all Docker URIs for which also a URL is contained and returns the @@ -995,13 +994,74 @@ def prioritize_direct_download(self, container_list): we want to keep it and not replace with with whatever we have now (which might be the Docker URI). A regex that matches http, r"^$|^http" could thus be used to prioritize the Docker URIs over http Downloads + + We also need to handle a special case: The https:// Singularity downloads from Seqera Containers all end in 'data', although + they are not equivalent, e.g.: + + 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data' + 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data' + + Lastly, we want to remove at least a few Docker URIs for those modules, that have an oras:// download link. """ - d = {} + d: Dict[str, str] = {} + seqera_containers_http: List[str] = [] + seqera_containers_oras: List[str] = [] + all_others: List[str] = [] + for c in container_list: + if bool(re.search(r"/data$", c)): + seqera_containers_http.append(c) + elif bool(re.search(r"^oras://", c)): + seqera_containers_oras.append(c) + else: + all_others.append(c) + + for c in all_others: if re.match(r"^$|(?!^http)", d.get(k := re.sub(".*/(.*)", "\\1", c), "")): log.debug(f"{c} matches and will be saved as {k}") d[k] = c - return sorted(list(d.values())) + + combined_with_oras = self.reconcile_seqera_container_uris(seqera_containers_oras, list(d.values())) + + # combine deduplicated others (Seqera containers oras, http others and Docker URI others) and Seqera containers http + return sorted(list(set(combined_with_oras + seqera_containers_http))) + + @staticmethod + def reconcile_seqera_container_uris(prioritized_container_list: List[str], other_list: List[str]) -> List[str]: + """ + Helper function that takes a list of Seqera container URIs, + extracts the software string and builds a regex from them to filter out + similar containers from the second container list. + + prioritzed_container_list = [ + ... "oras://community.wave.seqera.io/library/multiqc:1.25.1--f0e743d16869c0bf", + ... "oras://community.wave.seqera.io/library/multiqc_pip_multiqc-plugins:e1f4877f1515d03c" + ... 
] + + will be cleaned to + + ['library/multiqc:1.25.1', 'library/multiqc_pip_multiqc-plugins'] + + Subsequently, build a regex from those and filter out matching duplicates in other_list: + """ + if not prioritized_container_list: + return other_list + else: + # trim the URIs to the stem that contains the tool string, assign with Walrus operator to account for non-matching patterns + trimmed_priority_list = [ + match.group() + for c in set(prioritized_container_list) + if (match := re.search(r"library/.*?:[\d.]+", c) if "--" in c else re.search(r"library/[^\s:]+", c)) + ] + + # build regex + prioritized_containers = re.compile("|".join(f"{re.escape(c)}" for c in trimmed_priority_list)) + + # filter out matches in other list + filtered_containers = [c for c in other_list if not re.search(prioritized_containers, c)] + + # combine prioritized and regular container lists + return sorted(list(set(prioritized_container_list + filtered_containers))) def gather_registries(self, workflow_directory: str) -> None: """Fetch the registries from the pipeline config and CLI arguments and store them in a set. @@ -1025,7 +1085,13 @@ def gather_registries(self, workflow_directory: str) -> None: self.registry_set.add(self.nf_config[registry]) # add depot.galaxyproject.org to the set, because it is the default registry for singularity hardcoded in modules - self.registry_set.add("depot.galaxyproject.org") + self.registry_set.add("depot.galaxyproject.org/singularity") + + # add community.wave.seqera.io/library to the set to support the new Seqera Docker container registry + self.registry_set.add("community.wave.seqera.io/library") + + # add https://community-cr-prod.seqera.io/docker/registry/v2/ to the set to support the new Seqera Singularity container registry + self.registry_set.add("community-cr-prod.seqera.io/docker/registry/v2") def symlink_singularity_images(self, image_out_path: str) -> None: """Create a symlink for each registry in the registry set that points to the image. @@ -1042,10 +1108,13 @@ def symlink_singularity_images(self, image_out_path: str) -> None: if self.registry_set: # Create a regex pattern from the set, in case trimming is needed. - trim_pattern = "|".join(f"^{re.escape(registry)}-?" for registry in self.registry_set) + trim_pattern = "|".join(f"^{re.escape(registry)}-?".replace("/", "[/-]") for registry in self.registry_set) for registry in self.registry_set: - if not os.path.basename(image_out_path).startswith(registry): + # Nextflow will convert it like this as well, so we need it to mimic its behavior + registry = registry.replace("/", "-") + + if not bool(re.search(trim_pattern, os.path.basename(image_out_path))): symlink_name = os.path.join("./", f"{registry}-{os.path.basename(image_out_path)}") else: trimmed_name = re.sub(f"{trim_pattern}", "", os.path.basename(image_out_path)) @@ -1265,7 +1334,7 @@ def singularity_image_filenames(self, container: str) -> Tuple[str, Optional[str # if docker.registry / singularity.registry are set to empty strings at runtime, which can be included in the HPC config profiles easily. if self.registry_set: # Create a regex pattern from the set of registries - trim_pattern = "|".join(f"^{re.escape(registry)}-?"
for registry in self.registry_set) + trim_pattern = "|".join(f"^{re.escape(registry)}-?".replace("/", "[/-]") for registry in self.registry_set) # Use the pattern to trim the string out_name = re.sub(f"{trim_pattern}", "", out_name) @@ -1347,9 +1416,10 @@ def singularity_download_image( log.debug(f"Copying {container} from cache: '{os.path.basename(out_path)}'") progress.update(task, description="Copying from cache to target directory") shutil.copyfile(cache_path, out_path) + self.symlink_singularity_images(cache_path) # symlinks inside the cache directory # Create symlinks to ensure that the images are found even with different registries being used. - self.symlink_singularity_images(output_path) + self.symlink_singularity_images(out_path) progress.remove_task(task) @@ -1393,9 +1463,10 @@ def singularity_pull_image( # Sometimes, container still contain an explicit library specification, which # resulted in attempted pulls e.g. from docker://quay.io/quay.io/qiime2/core:2022.11 # Thus, if an explicit registry is specified, the provided -l value is ignored. + # Additionally, check if the container to be pulled is native Singularity: oras:// protocol. container_parts = container.split("/") if len(container_parts) > 2: - address = f"docker://{container}" + address = container if container.startswith("oras://") else f"docker://{container}" absolute_URI = True else: address = f"docker://{library}/{container.replace('docker://', '')}" @@ -1458,9 +1529,10 @@ def singularity_pull_image( log.debug(f"Copying {container} from cache: '{os.path.basename(out_path)}'") progress.update(task, current_log="Copying from cache to target directory") shutil.copyfile(cache_path, out_path) + self.symlink_singularity_images(cache_path) # symlinks inside the cache directory # Create symlinks to ensure that the images are found even with different registries being used. - self.symlink_singularity_images(output_path) + self.symlink_singularity_images(out_path) progress.remove_task(task) @@ -1694,7 +1766,7 @@ def tidy_tags_and_branches(self): self.repo.create_head("latest", "latest") # create a new head for latest self.checkout("latest") else: - # desired revisions may contain arbitrary branch names that do not correspond to valid sematic versioning patterns. + # desired revisions may contain arbitrary branch names that do not correspond to valid semantic versioning patterns. 
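A sketch of the registry-trimming regex from the hunks above: `/` in each registry is matched as either `/` or `-`, because Nextflow flattens the path separator to `-` when it writes image file names (example values, not nf-core code):

```python
import re

registry_set = {"quay.io", "community.wave.seqera.io/library"}
# Each registry becomes an anchored alternative; '/' may appear as '/' or '-'.
trim_pattern = "|".join(f"^{re.escape(registry)}-?".replace("/", "[/-]") for registry in registry_set)

print(re.sub(trim_pattern, "", "community.wave.seqera.io-library-multiqc-1.25.1.img"))
# -> multiqc-1.25.1.img
```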
valid_versions = [ Version(v) for v in desired_revisions if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v) ] @@ -1816,6 +1888,9 @@ def __init__( elif re.search(r"manifest\sunknown", line): self.error_type = self.InvalidTagError(self) break + elif re.search(r"ORAS\sSIF\simage\sshould\shave\sa\ssingle\slayer", line): + self.error_type = self.NoSingularityContainerError(self) + break elif re.search(r"Image\sfile\salready\sexists", line): self.error_type = self.ImageExistsError(self) break @@ -1880,6 +1955,17 @@ def __init__(self, error_log): self.helpmessage = f'Saving image of "{self.error_log.container}" failed, because "{self.error_log.out_path}" exists.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.\n' super().__init__(self.message) + class NoSingularityContainerError(RuntimeError): + """The container image is not in native Singularity Image Format.""" + + def __init__(self, error_log): + self.error_log = error_log + self.message = ( + f'[bold red]"{self.error_log.container}" is not a valid Singularity Image Format container.[/]\n' + ) + self.helpmessage = f"Pulling \"{self.error_log.container}\" failed, because it appears invalid. To convert from Docker's OCI format, prefix the URI with 'docker://' instead of 'oras://'.\n" + super().__init__(self.message) + class OtherError(RuntimeError): """Undefined error with the container""" diff --git a/nf_core/pipelines/launch.py b/nf_core/pipelines/launch.py index a80639ea94..aab0ec4287 100644 --- a/nf_core/pipelines/launch.py +++ b/nf_core/pipelines/launch.py @@ -276,7 +276,7 @@ def merge_nxf_flag_schema(self): self.schema_obj.schema["definitions"] = {} self.schema_obj.schema["definitions"].update(self.nxf_flag_schema) self.schema_obj.schema["allOf"].insert(0, {"$ref": "#/definitions/coreNextflow"}) - # Add the new defintion to the allOf key so that it's included in validation + # Add the new definition to the allOf key so that it's included in validation # Put it at the start of the list so that it comes first def prompt_web_gui(self): @@ -316,7 +316,7 @@ def launch_web_gui(self): raise AssertionError('"api_url" not in web_response') if "web_url" not in web_response: raise AssertionError('"web_url" not in web_response') - # DO NOT FIX THIS TYPO. Needs to stay in sync with the website. Maintaining for backwards compatability. + # DO NOT FIX THIS TYPO. Needs to stay in sync with the website. Maintaining for backwards compatibility.
if web_response["status"] != "recieved": raise AssertionError( f'web_response["status"] should be "recieved", but it is "{web_response["status"]}"' @@ -434,7 +434,7 @@ def prompt_param(self, param_id, param_obj, is_required, answers): question = self.single_param_to_questionary(param_id, param_obj, answers) answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style) - # If required and got an empty reponse, ask again + # If required and got an empty response, ask again while isinstance(answer[param_id], str) and answer[param_id].strip() == "" and is_required: log.error(f"'--{param_id}' is required") answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style) @@ -457,7 +457,7 @@ def prompt_group(self, group_id, group_obj): Prompt for edits to a group of parameters (subschema in 'definitions') Args: - group_id: Paramater ID (string) + group_id: Parameter ID (string) group_obj: JSON Schema keys (dict) Returns: diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 6d27351b62..154e38aea6 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -27,8 +27,8 @@ from nf_core import __version__ from nf_core.components.lint import ComponentLint from nf_core.pipelines.lint_utils import console +from nf_core.utils import NFCoreYamlConfig, NFCoreYamlLintConfig, strip_ansi_codes from nf_core.utils import plural_s as _s -from nf_core.utils import strip_ansi_codes from .actions_awsfulltest import actions_awsfulltest from .actions_awstest import actions_awstest @@ -37,6 +37,7 @@ from .configs import base_config, modules_config from .files_exist import files_exist from .files_unchanged import files_unchanged +from .included_configs import included_configs from .merge_markers import merge_markers from .modules_json import modules_json from .modules_structure import modules_structure @@ -101,6 +102,7 @@ class PipelineLint(nf_core.utils.Pipeline): system_exit = system_exit template_strings = template_strings version_consistency = version_consistency + included_configs = included_configs def __init__( self, wf_path, release_mode=False, fix=(), key=None, fail_ignored=False, fail_warned=False, hide_progress=False @@ -110,7 +112,7 @@ def __init__( # Initialise the parent object super().__init__(wf_path) - self.lint_config = {} + self.lint_config: Optional[NFCoreYamlLintConfig] = None self.release_mode = release_mode self.fail_ignored = fail_ignored self.fail_warned = fail_warned @@ -152,7 +154,7 @@ def _get_all_lint_tests(release_mode): "base_config", "modules_config", "nfcore_yml", - ] + (["version_consistency"] if release_mode else []) + ] + (["version_consistency", "included_configs"] if release_mode else []) def _load(self) -> bool: """Load information about the pipeline into the PipelineLint object""" @@ -171,13 +173,12 @@ def _load_lint_config(self) -> bool: Add parsed config to the `self.lint_config` class attribute. 
""" _, tools_config = nf_core.utils.load_tools_config(self.wf_path) - self.lint_config = getattr(tools_config, "lint", {}) or {} + self.lint_config = getattr(tools_config, "lint", None) or None is_correct = True - # Check if we have any keys that don't match lint test names if self.lint_config is not None: - for k in self.lint_config: - if k != "nfcore_components" and k not in self.lint_tests: + for k, v in self.lint_config: + if v is not None and k != "nfcore_components" and k not in self.lint_tests: # nfcore_components is an exception to allow custom pipelines without nf-core components log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") is_correct = False @@ -592,7 +593,7 @@ def run_linting( lint_obj._load_lint_config() lint_obj.load_pipeline_config() - if "nfcore_components" in lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: + if lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: module_lint_obj = None subworkflow_lint_obj = None else: @@ -677,5 +678,4 @@ def run_linting( if len(lint_obj.failed) > 0: if release_mode: log.info("Reminder: Lint tests were run in --release mode.") - return lint_obj, module_lint_obj, subworkflow_lint_obj diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index bd25ff33d0..19c2498263 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -66,6 +66,7 @@ def files_exist(self) -> Dict[str, List[str]]: conf/igenomes.config .github/workflows/awstest.yml .github/workflows/awsfulltest.yml + ro-crate-metadata.json Files that *must not* be present, due to being renamed or removed in the template: @@ -167,9 +168,11 @@ def files_exist(self) -> Dict[str, List[str]]: [Path("assets", "multiqc_config.yml")], [Path("conf", "base.config")], [Path("conf", "igenomes.config")], + [Path("conf", "igenomes_ignored.config")], [Path(".github", "workflows", "awstest.yml")], [Path(".github", "workflows", "awsfulltest.yml")], [Path("modules.json")], + [Path("ro-crate-metadata.json")], ] # List of strings. Fails / warns if any of the strings exist. @@ -197,6 +200,12 @@ def files_exist(self) -> Dict[str, List[str]]: ] files_warn_ifexists = [Path(".travis.yml")] + files_hint = [ + [ + ["ro-crate-metadata.json"], + ". Run `nf-core rocrate` to generate this file. Read more about RO-Crates in the [nf-core/tools docs](https://nf-co.re/tools#create-a-ro-crate-metadata-file).", + ], + ] # Remove files that should be ignored according to the linting config ignore_files = self.lint_config.get("files_exist", []) if self.lint_config is not None else [] @@ -224,7 +233,11 @@ def pf(file_path: Union[str, Path]) -> Path: if any([pf(f).is_file() for f in files]): passed.append(f"File found: {self._wrap_quotes(files)}") else: - warned.append(f"File not found: {self._wrap_quotes(files)}") + hint = "" + for file_hint in files_hint: + if file_hint[0] == files: + hint = str(file_hint[1]) + warned.append(f"File not found: {self._wrap_quotes(files)}{hint}") # Files that cause an error if they exist for file in files_fail_ifexists: diff --git a/nf_core/pipelines/lint/included_configs.py b/nf_core/pipelines/lint/included_configs.py new file mode 100644 index 0000000000..75c4594f41 --- /dev/null +++ b/nf_core/pipelines/lint/included_configs.py @@ -0,0 +1,36 @@ +from pathlib import Path + + +def included_configs(self): + """Check that the pipeline nextflow.config includes the pipeline custom configs. + + If the include line is uncommented, the test passes. 
+ If the include line is commented, the test fails. + If the include line is missing, the test warns. + + Can be skipped by adding the following to the .nf-core.yml file: + lint: + included_configs: False + """ + passed = [] + failed = [] + warned = [] + + config_file = Path(self.wf_path / "nextflow.config") + + with open(config_file) as fh: + config = fh.read() + if ( + f"// includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" + in config + ): + failed.append("Pipeline config does not include custom configs. Please uncomment the includeConfig line.") + elif ( + f"includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? \"${{params.custom_config_base}}/pipeline/{self.pipeline_name}.config\"" + in config + ): + passed.append("Pipeline config includes custom configs.") + else: + warned.append("Pipeline config does not include custom configs. Please add the includeConfig line.") + + return {"passed": passed, "failed": failed, "warned": warned} diff --git a/nf_core/pipelines/lint/multiqc_config.py b/nf_core/pipelines/lint/multiqc_config.py index 2b0fc7902e..fec5b518e3 100644 --- a/nf_core/pipelines/lint/multiqc_config.py +++ b/nf_core/pipelines/lint/multiqc_config.py @@ -31,6 +31,15 @@ def multiqc_config(self) -> Dict[str, List[str]]: lint: multiqc_config: False + To disable this test only for specific sections, you can specify a list of section names. + For example: + + .. code-block:: yaml + lint: + multiqc_config: + - report_section_order + - report_comment + """ passed: List[str] = [] diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 790fc21797..6ae55501b2 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -80,6 +80,9 @@ def nextflow_config(self) -> Dict[str, List[str]]: * ``params.nf_required_version``: The old method for specifying the minimum Nextflow version. Replaced by ``manifest.nextflowVersion`` * ``params.container``: The old method for specifying the dockerhub container address. Replaced by ``process.container`` * ``igenomesIgnore``: Changed to ``igenomes_ignore`` + * ``params.max_cpus``: Old method of specifying the maximum number of CPUs a process can request. Replaced by the native Nextflow `resourceLimits` directive in config files. + * ``params.max_memory``: Old method of specifying the maximum amount of memory a process can request. Replaced by the native Nextflow `resourceLimits` directive. + * ``params.max_time``: Old method of specifying the maximum time a process can request. Replaced by the native Nextflow `resourceLimits` directive. ..
tip:: The ``snake_case`` convention should now be used when defining pipeline parameters @@ -146,7 +149,13 @@ def nextflow_config(self) -> Dict[str, List[str]]: ["params.input"], ] # Throw a warning if these are missing - config_warn = [["manifest.mainScript"], ["timeline.file"], ["trace.file"], ["report.file"], ["dag.file"]] + config_warn = [ + ["manifest.mainScript"], + ["timeline.file"], + ["trace.file"], + ["report.file"], + ["dag.file"], + ] # Old depreciated vars - fail if present config_fail_ifdefined = [ "params.nf_required_version", @@ -155,6 +164,9 @@ def nextflow_config(self) -> Dict[str, List[str]]: "params.igenomesIgnore", "params.name", "params.enable_conda", + "params.max_cpus", + "params.max_memory", + "params.max_time", ] # Lint for plugins @@ -334,7 +346,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: failed.append(f"Config `params.custom_config_base` is not set to `{custom_config_base}`") # Check that lines for loading custom profiles exist - lines = [ + old_lines = [ r"// Load nf-core custom profiles from different Institutions", r"try {", r'includeConfig "${params.custom_config_base}/nfcore_custom.config"', @@ -342,11 +354,19 @@ def nextflow_config(self) -> Dict[str, List[str]]: r'System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")', r"}", ] + lines = [ + r"// Load nf-core custom profiles from different Institutions", + r'''includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null"''', + ] path = Path(self.wf_path, "nextflow.config") i = 0 with open(path) as f: for line in f: - if lines[i] in line: + if old_lines[i] in line: + i += 1 + if i == len(old_lines): + break + elif lines[i] in line: i += 1 if i == len(lines): break @@ -354,6 +374,12 @@ def nextflow_config(self) -> Dict[str, List[str]]: i = 0 if i == len(lines): passed.append("Lines for loading custom profiles found") + elif i == len(old_lines): + failed.append( + "Old lines for loading custom profiles found. File should contain: ```groovy\n{}".format( + "\n".join(lines) + ) + ) else: lines[2] = f"\t{lines[2]}" lines[4] = f"\t{lines[4]}" @@ -439,6 +465,7 @@ def nextflow_config(self) -> Dict[str, List[str]]: f"Config default value incorrect: `{param}` is set as {self._wrap_quotes(schema_default)} in `nextflow_schema.json` but is {self._wrap_quotes(self.nf_config[param])} in `nextflow.config`." ) else: + schema_default = str(schema.schema_defaults[param_name]) failed.append( f"Default value from the Nextflow schema `{param} = {self._wrap_quotes(schema_default)}` not found in `nextflow.config`." 
) diff --git a/nf_core/pipelines/lint/nfcore_yml.py b/nf_core/pipelines/lint/nfcore_yml.py index e0d5fb2005..3395696d1d 100644 --- a/nf_core/pipelines/lint/nfcore_yml.py +++ b/nf_core/pipelines/lint/nfcore_yml.py @@ -1,7 +1,8 @@ -import re from pathlib import Path from typing import Dict, List +from ruamel.yaml import YAML + from nf_core import __version__ REPOSITORY_TYPES = ["pipeline", "modules"] @@ -26,21 +27,23 @@ def nfcore_yml(self) -> Dict[str, List[str]]: failed: List[str] = [] ignored: List[str] = [] + yaml = YAML() + # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get(".nf-core", []) if self.lint_config is not None else [] - try: - with open(Path(self.wf_path, ".nf-core.yml")) as fh: - content = fh.read() - except FileNotFoundError: - with open(Path(self.wf_path, ".nf-core.yaml")) as fh: - content = fh.read() + for ext in (".yml", ".yaml"): + try: + nf_core_yml = yaml.load(Path(self.wf_path) / f".nf-core{ext}") + break + except FileNotFoundError: + continue + else: + raise FileNotFoundError("No `.nf-core.yml` file found.") if "repository_type" not in ignore_configs: # Check that the repository type is set in the .nf-core.yml - repo_type_re = r"repository_type: (.+)" - match = re.search(repo_type_re, content) - if match: - repo_type = match.group(1) + if "repository_type" in nf_core_yml: + repo_type = nf_core_yml["repository_type"] if repo_type not in REPOSITORY_TYPES: failed.append( f"Repository type in `.nf-core.yml` is not valid. " @@ -55,10 +58,8 @@ def nfcore_yml(self) -> Dict[str, List[str]]: if "nf_core_version" not in ignore_configs: # Check that the nf-core version is set in the .nf-core.yml - nf_core_version_re = r"nf_core_version: (.+)" - match = re.search(nf_core_version_re, content) - if match: - nf_core_version = match.group(1).strip('"') + if "nf_core_version" in nf_core_yml: + nf_core_version = nf_core_yml["nf_core_version"] if nf_core_version != __version__ and "dev" not in nf_core_version: warned.append( f"nf-core version in `.nf-core.yml` is not set to the latest version. " diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index 1c09104258..75b05f16ed 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -23,6 +23,21 @@ def readme(self): * If pipeline is released but still contains a 'zenodo.XXXXXXX' tag, the test fails + To disable this test, add the following to the pipeline's ``.nf-core.yml`` file: + + .. code-block:: yaml + lint: + readme: False + + To disable subsets of these tests, add the following to the pipeline's ``.nf-core.yml`` file: + + .. 
code-block:: yaml + + lint: + readme: + - nextflow_badge + - zenodo_release + """ passed = [] warned = [] @@ -36,7 +51,7 @@ def readme(self): if "nextflow_badge" not in ignore_configs: # Check that there is a readme badge showing the minimum required version of Nextflow - # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.10.0-23aa62.svg)](https://www.nextflow.io/) + # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/) # and that it has the correct version nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-!?(?:%E2%89%A5|%3E%3D)([\d\.]+)-23aa62\.svg\)\]\(https://www\.nextflow\.io/\)" match = re.search(nf_badge_re, content) diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 37a1f64daf..0cb669e553 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -5,7 +5,7 @@ def template_strings(self): """Check for template placeholders. - The ``nf-core create`` pipeline template uses + The ``nf-core pipelines create`` pipeline template uses `Jinja `_ behind the scenes. This lint test fails if any Jinja template variables such as @@ -39,8 +39,8 @@ def template_strings(self): ignored = [] # Files that should be ignored according to the linting config ignore_files = self.lint_config.get("template_strings", []) if self.lint_config is not None else [] - files = self.list_files() + files = self.list_files() # Loop through files, searching for string num_matches = 0 for fn in files: diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py index ff65fb0e56..b4c56c6007 100644 --- a/nf_core/pipelines/lint_utils.py +++ b/nf_core/pipelines/lint_utils.py @@ -2,9 +2,10 @@ import logging import subprocess from pathlib import Path -from typing import List +from typing import List, Union import rich +import yaml from rich.console import Console from rich.table import Table @@ -69,7 +70,7 @@ def print_fixes(lint_obj): ) -def run_prettier_on_file(file): +def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: """Run the pre-commit hook prettier on a file. Args: @@ -80,12 +81,15 @@ def run_prettier_on_file(file): """ nf_core_pre_commit_config = Path(nf_core.__file__).parent / ".pre-commit-prettier-config.yaml" + args = ["pre-commit", "run", "--config", str(nf_core_pre_commit_config), "prettier"] + if isinstance(file, List): + args.extend(["--files", *file]) + else: + args.extend(["--files", str(file)]) + try: - subprocess.run( - ["pre-commit", "run", "--config", nf_core_pre_commit_config, "prettier", "--files", file], - capture_output=True, - check=True, - ) + subprocess.run(args, capture_output=True, check=True) + log.debug(f"${subprocess.STDOUT}") except subprocess.CalledProcessError as e: if ": SyntaxError: " in e.stdout.decode(): log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}") @@ -111,6 +115,18 @@ def dump_json_with_prettier(file_name, file_content): run_prettier_on_file(file_name) +def dump_yaml_with_prettier(file_name: Union[Path, str], file_content: dict) -> None: + """Dump a YAML file and run prettier on it. + + Args: + file_name (Path | str): A file identifier as a string or pathlib.Path. 
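The reworked `run_prettier_on_file` above accepts either a single path or a list of paths and builds one `pre-commit` invocation either way. A self-contained sketch of just the argument assembly (config filename shortened for illustration):

```python
from pathlib import Path
from typing import List, Union


def build_prettier_args(file: Union[Path, str, List[str]]) -> List[str]:
    # Mirrors the arg assembly in run_prettier_on_file: one invocation,
    # however many files were passed.
    args = ["pre-commit", "run", "--config", "pre-commit-prettier-config.yaml", "prettier"]
    if isinstance(file, list):
        args.extend(["--files", *file])
    else:
        args.extend(["--files", str(file)])
    return args


print(build_prettier_args(["nextflow.config", ".nf-core.yml"]))
# [..., 'prettier', '--files', 'nextflow.config', '.nf-core.yml']
```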
+ file_content (dict): Content to dump into the YAML file + """ + with open(file_name, "w") as fh: + yaml.safe_dump(file_content, fh) + run_prettier_on_file(file_name) + + def ignore_file(lint_name: str, file_path: Path, dir_path: Path) -> List[List[str]]: """Ignore a file and add the result to the ignored list. Return the passed, failed, ignored and ignore_configs lists.""" diff --git a/nf_core/pipelines/params_file.py b/nf_core/pipelines/params_file.py index d61b7cfbc8..69326c142d 100644 --- a/nf_core/pipelines/params_file.py +++ b/nf_core/pipelines/params_file.py @@ -2,9 +2,9 @@ import json import logging -import os import textwrap -from typing import Literal, Optional +from pathlib import Path +from typing import Dict, List, Literal, Optional import questionary @@ -27,7 +27,7 @@ ModeLiteral = Literal["both", "start", "end", "none"] -def _print_wrapped(text, fill_char="-", mode="both", width=80, indent=0, drop_whitespace=True): +def _print_wrapped(text, fill_char="-", mode="both", width=80, indent=0, drop_whitespace=True) -> str: """Helper function to format text for the params-file template. Args: @@ -100,7 +100,7 @@ def __init__( self.wfs = nf_core.pipelines.list.Workflows() self.wfs.get_remote_workflows() - def get_pipeline(self): + def get_pipeline(self) -> Optional[bool]: """ Prompt the user for a pipeline name and get the schema """ @@ -124,11 +124,14 @@ def get_pipeline(self): ).unsafe_ask() # Get the schema - self.schema_obj = nf_core.pipelines.schema.PipelineSchema() + self.schema_obj = PipelineSchema() + if self.schema_obj is None: + return False self.schema_obj.get_schema_path(self.pipeline, local_only=False, revision=self.pipeline_revision) self.schema_obj.get_wf_params() + return True - def format_group(self, definition, show_hidden=False): + def format_group(self, definition, show_hidden=False) -> str: """Format a group of parameters of the schema as commented YAML. Args: @@ -167,7 +170,9 @@ def format_group(self, definition, show_hidden=False): return out - def format_param(self, name, properties, required_properties=(), show_hidden=False): + def format_param( + self, name: str, properties: Dict, required_properties: List[str] = [], show_hidden: bool = False + ) -> Optional[str]: """ Format a single parameter of the schema as commented YAML @@ -188,6 +193,9 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal return None description = properties.get("description", "") + if self.schema_obj is None: + log.error("No schema object found") + return "" self.schema_obj.get_schema_defaults() default = properties.get("default") type = properties.get("type") @@ -209,7 +217,7 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal return out - def generate_params_file(self, show_hidden=False): + def generate_params_file(self, show_hidden: bool = False) -> str: """Generate the contents of a parameter template file. Assumes the pipeline has been fetched (if remote) and the schema loaded. 
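The hunk below makes the params-file generator look for parameter groups under either key, since JSON Schema draft 2020-12 renamed `definitions` to `$defs`. A toy schema showing the fallback:

```python
# Minimal schema with only the draft 2020-12 key present.
schema = {"$defs": {"input_output_options": {"properties": {"input": {"type": "string"}}}}}

for definition in schema.get("definitions", schema.get("$defs", {})).values():
    print(list(definition["properties"]))  # ['input']
```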
@@ -220,6 +228,10 @@ def generate_params_file(self, show_hidden=False): Returns: str: Formatted output for the pipeline schema """ + if self.schema_obj is None: + log.error("No schema object found") + return "" + schema = self.schema_obj.schema pipeline_name = self.schema_obj.pipeline_manifest.get("name", self.pipeline) pipeline_version = self.schema_obj.pipeline_manifest.get("version", "0.0.0") @@ -234,13 +246,13 @@ def generate_params_file(self, show_hidden=False): out += "\n" # Add all parameter groups - for definition in schema.get("definitions", {}).values(): + for definition in schema.get("definitions", schema.get("$defs", {})).values(): out += self.format_group(definition, show_hidden=show_hidden) out += "\n" return out - def write_params_file(self, output_fn="nf-params.yaml", show_hidden=False, force=False): + def write_params_file(self, output_fn: Path = Path("nf-params.yaml"), show_hidden=False, force=False) -> bool: """Build a template file for the pipeline schema. Args: @@ -254,7 +266,9 @@ def write_params_file(self, output_fn="nf-params.yaml", show_hidden=False, force """ self.get_pipeline() - + if self.schema_obj is None: + log.error("No schema object found") + return False try: self.schema_obj.load_schema() self.schema_obj.validate_schema() @@ -265,11 +279,10 @@ def write_params_file(self, output_fn="nf-params.yaml", show_hidden=False, force schema_out = self.generate_params_file(show_hidden=show_hidden) - if os.path.exists(output_fn) and not force: + if output_fn.exists() and not force: log.error(f"File '{output_fn}' exists! Please delete first, or use '--force'") return False - with open(output_fn, "w") as fh: - fh.write(schema_out) - log.info(f"Parameter file written to '{output_fn}'") + output_fn.write_text(schema_out) + log.info(f"Parameter file written to '{output_fn}'") return True diff --git a/nf_core/pipelines/refgenie.py b/nf_core/pipelines/refgenie.py index 426ca5eb7d..46197e9cc8 100644 --- a/nf_core/pipelines/refgenie.py +++ b/nf_core/pipelines/refgenie.py @@ -144,14 +144,14 @@ def update_config(rgc): This function is executed after running 'refgenie pull /' The refgenie config file is transformed into a nextflow.config file, which is used to - overwrited the 'refgenie_genomes.config' file. + overwrite the 'refgenie_genomes.config' file. 
The path to the target config file is inferred from the following options, in order: - the 'nextflow_config' attribute in the refgenie config file - the NXF_REFGENIE_PATH environment variable - otherwise defaults to: $NXF_HOME/nf-core/refgenie_genomes.config - Additionaly, an 'includeConfig' statement is added to the file $NXF_HOME/config + Additionally, an 'includeConfig' statement is added to the file $NXF_HOME/config """ # Compile nextflow refgenie_genomes.config from refgenie config diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py new file mode 100644 index 0000000000..915f203f00 --- /dev/null +++ b/nf_core/pipelines/rocrate.py @@ -0,0 +1,362 @@ +#!/usr/bin/env python +"""Code to deal with pipeline RO (Research Object) Crates""" + +import logging +import os +import re +import sys +from datetime import datetime +from pathlib import Path +from typing import Optional, Set, Union + +import requests +import rocrate.rocrate +from git import GitCommandError, InvalidGitRepositoryError +from repo2rocrate.nextflow import NextflowCrateBuilder +from rich.progress import BarColumn, Progress +from rocrate.model.person import Person +from rocrate.rocrate import ROCrate as BaseROCrate + +from nf_core.utils import Pipeline + +log = logging.getLogger(__name__) + + +class CustomNextflowCrateBuilder(NextflowCrateBuilder): + DATA_ENTITIES = NextflowCrateBuilder.DATA_ENTITIES + [ + ("docs/usage.md", "File", "Usage documentation"), + ("docs/output.md", "File", "Output documentation"), + ("subworkflows/local", "Dataset", "Pipeline-specific subworkflows"), + ("subworkflows/nf-core", "Dataset", "nf-core subworkflows"), + (".nf-core.yml", "File", "nf-core configuration file, configuring template features and linting rules"), + (".pre-commit-config.yaml", "File", "Configuration file for pre-commit hooks"), + (".prettierignore", "File", "Ignore file for prettier"), + (".prettierrc", "File", "Configuration file for prettier"), + ] + + +def custom_make_crate( + root: Path, + workflow: Optional[Path] = None, + repo_url: Optional[str] = None, + wf_name: Optional[str] = None, + wf_version: Optional[str] = None, + lang_version: Optional[str] = None, + ci_workflow: Optional[str] = "ci.yml", + diagram: Optional[Path] = None, +) -> BaseROCrate: + builder = CustomNextflowCrateBuilder(root, repo_url=repo_url) + + return builder.build( + workflow, + wf_name=wf_name, + wf_version=wf_version, + lang_version=lang_version, + license=None, + ci_workflow=ci_workflow, + diagram=diagram, + ) + + +class ROCrate: + """ + Class to generate an RO Crate for a pipeline + + """ + + def __init__(self, pipeline_dir: Path, version="") -> None: + """ + Initialise the ROCrate object + + Args: + pipeline_dir (Path): Path to the pipeline directory + version (str): Version of the pipeline to checkout + """ + from nf_core.utils import is_pipeline_directory, setup_requests_cachedir + + is_pipeline_directory(pipeline_dir) + self.pipeline_dir = pipeline_dir + self.version: str = version + self.crate: rocrate.rocrate.ROCrate + self.pipeline_obj = Pipeline(self.pipeline_dir) + self.pipeline_obj._load() + + setup_requests_cachedir() + + def create_rocrate(self, json_path: Union[None, Path] = None, zip_path: Union[None, Path] = None) -> bool: + """ + Create an RO Crate for a pipeline + + Args: + outdir (Path): Path to the output directory + json_path (Path): Path to the metadata file + zip_path (Path): Path to the zip file + + """ + + # Check that the checked-out pipeline version is the same as the requested version + if self.version != 
"": + if self.version != self.pipeline_obj.nf_config.get("manifest.version"): + # using git checkout to get the requested version + log.info(f"Checking out pipeline version {self.version}") + if self.pipeline_obj.repo is None: + log.error(f"Pipeline repository not found in {self.pipeline_dir}") + sys.exit(1) + try: + self.pipeline_obj.repo.git.checkout(self.version) + self.pipeline_obj = Pipeline(self.pipeline_dir) + self.pipeline_obj._load() + except InvalidGitRepositoryError: + log.error(f"Could not find a git repository in {self.pipeline_dir}") + sys.exit(1) + except GitCommandError: + log.error(f"Could not checkout version {self.version}") + sys.exit(1) + self.version = self.pipeline_obj.nf_config.get("manifest.version", "") + self.make_workflow_rocrate() + + # Save just the JSON metadata file + if json_path is not None: + if json_path.name == "ro-crate-metadata.json": + json_path = json_path.parent + + log.info(f"Saving metadata file to '{json_path}'") + self.crate.metadata.write(json_path) + + # Save the whole crate zip file + if zip_path is not None: + if zip_path.name != "ro-crate.crate.zip": + zip_path = zip_path / "ro-crate.crate.zip" + log.info(f"Saving zip file '{zip_path}") + self.crate.write_zip(zip_path) + + if json_path is None and zip_path is None: + log.error("Please provide a path to save the ro-crate file or the zip file.") + return False + + return True + + def make_workflow_rocrate(self) -> None: + """ + Create an RO Crate for a pipeline + """ + if self.pipeline_obj is None: + raise ValueError("Pipeline object not loaded") + + diagram: Optional[Path] = None + # find files (metro|tube)_?(map)?.png in the pipeline directory or docs/ using pathlib + pattern = re.compile(r".*?(metro|tube|subway)_(map).*?\.png", re.IGNORECASE) + for file in self.pipeline_dir.rglob("*.png"): + if pattern.match(file.name): + log.debug(f"Found diagram: {file}") + diagram = file.relative_to(self.pipeline_dir) + break + + # Create the RO Crate object + + self.crate = custom_make_crate( + self.pipeline_dir, + self.pipeline_dir / "main.nf", + self.pipeline_obj.nf_config.get("manifest.homePage", ""), + self.pipeline_obj.nf_config.get("manifest.name", ""), + self.pipeline_obj.nf_config.get("manifest.version", ""), + self.pipeline_obj.nf_config.get("manifest.nextflowVersion", ""), + diagram=diagram, + ) + + # add readme as description + readme = self.pipeline_dir / "README.md" + + try: + self.crate.description = readme.read_text() + except FileNotFoundError: + log.error(f"Could not find README.md in {self.pipeline_dir}") + # get license from LICENSE file + license_file = self.pipeline_dir / "LICENSE" + try: + license = license_file.read_text() + if license.startswith("MIT"): + self.crate.license = "MIT" + else: + # prompt for license + log.info("Could not determine license from LICENSE file") + self.crate.license = input("Please enter the license for this pipeline: ") + except FileNotFoundError: + log.error(f"Could not find LICENSE file in {self.pipeline_dir}") + + self.crate.add_jsonld( + {"@id": "https://nf-co.re/", "@type": "Organization", "name": "nf-core", "url": "https://nf-co.re/"} + ) + + # Set metadata for main entity file + self.set_main_entity("main.nf") + + def set_main_entity(self, main_entity_filename: str): + """ + Set the main.nf as the main entity of the crate and add necessary metadata + """ + if self.crate.mainEntity is None: + raise ValueError("Main entity not set") + + self.crate.mainEntity.append_to( + "dct:conformsTo", 
"https://bioschemas.org/profiles/ComputationalWorkflow/1.0-RELEASE/", compact=True + ) + # add dateCreated and dateModified, based on the current data + self.crate.mainEntity.append_to("dateCreated", self.crate.root_dataset.get("dateCreated", ""), compact=True) + self.crate.mainEntity.append_to( + "dateModified", str(datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")), compact=True + ) + self.crate.mainEntity.append_to("sdPublisher", {"@id": "https://nf-co.re/"}, compact=True) + if self.version.endswith("dev"): + url = "dev" + else: + url = self.version + self.crate.mainEntity.append_to( + "url", f"https://nf-co.re/{self.crate.name.replace('nf-core/','')}/{url}/", compact=True + ) + self.crate.mainEntity.append_to("version", self.version, compact=True) + + # get keywords from nf-core website + remote_workflows = requests.get("https://nf-co.re/pipelines.json").json()["remote_workflows"] + # go through all remote workflows and find the one that matches the pipeline name + topics = ["nf-core", "nextflow"] + for remote_wf in remote_workflows: + assert self.pipeline_obj.pipeline_name is not None # mypy + if remote_wf["name"] == self.pipeline_obj.pipeline_name.replace("nf-core/", ""): + topics = topics + remote_wf["topics"] + break + + log.debug(f"Adding topics: {topics}") + self.crate.mainEntity.append_to("keywords", topics) + + self.add_main_authors(self.crate.mainEntity) + + self.crate.mainEntity = self.crate.mainEntity + + self.crate.mainEntity.append_to("license", self.crate.license) + self.crate.mainEntity.append_to("name", self.crate.name) + + if "dev" in self.version: + self.crate.creativeWorkStatus = "InProgress" + else: + self.crate.creativeWorkStatus = "Stable" + if self.pipeline_obj.repo is None: + log.error(f"Pipeline repository not found in {self.pipeline_dir}") + else: + tags = self.pipeline_obj.repo.tags + if tags: + # get the tag for this version + for tag in tags: + if tag.commit.hexsha == self.pipeline_obj.repo.head.commit.hexsha: + self.crate.mainEntity.append_to( + "dateCreated", + tag.commit.committed_datetime.strftime("%Y-%m-%dT%H:%M:%SZ"), + compact=True, + ) + + def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: + """ + Add workflow authors to the crate + """ + # add author entity to crate + + try: + authors = self.pipeline_obj.nf_config["manifest.author"].split(",") + # remove spaces + authors = [a.strip() for a in authors] + # add manifest authors as maintainer to crate + + except KeyError: + log.error("No author field found in manifest of nextflow.config") + return + # look at git contributors for author names + try: + git_contributors: Set[str] = set() + if self.pipeline_obj.repo is None: + log.info("No git repository found. 
No git contributors will be added as authors.") + return + commits_touching_path = list(self.pipeline_obj.repo.iter_commits(paths="main.nf")) + + for commit in commits_touching_path: + if commit.author.name is not None: + git_contributors.add(commit.author.name) + # exclude bots + contributors = {c for c in git_contributors if not c.endswith("bot") and c != "Travis CI User"} + + log.debug(f"Found {len(contributors)} git authors") + + progress_bar = Progress( + "[bold blue]{task.description}", + BarColumn(bar_width=None), + "[magenta]{task.completed} of {task.total}[reset] » [bold yellow]{task.fields[test_name]}", + transient=True, + disable=os.environ.get("HIDE_PROGRESS", None) is not None, + ) + with progress_bar: + bump_progress = progress_bar.add_task( + "Searching for author names on GitHub", total=len(contributors), test_name="" + ) + + for git_author in contributors: + progress_bar.update(bump_progress, advance=1, test_name=git_author) + git_author = ( + requests.get(f"https://api.github.com/users/{git_author}").json().get("name", git_author) + ) + if git_author is None: + log.debug(f"Could not find name for {git_author}") + continue + + except AttributeError: + log.debug("Could not find git contributors") + + # remove usernames (just keep names with spaces) + named_contributors = {c for c in contributors if " " in c} + + for author in named_contributors: + log.debug(f"Adding author: {author}") + + if self.pipeline_obj.repo is None: + log.info("No git repository found. No git contributors will be added as authors.") + return + # get email from git log + email = self.pipeline_obj.repo.git.log(f"--author={author}", "--pretty=format:%ae", "-1") + orcid = get_orcid(author) + author_entity = self.crate.add( + Person( + self.crate, orcid if orcid is not None else "#" + email, properties={"name": author, "email": email} + ) + ) + wf_file.append_to("creator", author_entity) + if author in authors: + wf_file.append_to("maintainer", author_entity) + + +def get_orcid(name: str) -> Optional[str]: + """ + Get the ORCID for a given name + + Args: + name (str): Name of the author + + Returns: + str: ORCID URI or None + """ + base_url = "https://pub.orcid.org/v3.0/search/" + headers = { + "Accept": "application/json", + } + params = {"q": f'family-name:"{name.split()[-1]}" AND given-names:"{name.split()[0]}"'} + response = requests.get(base_url, params=params, headers=headers) + + if response.status_code == 200: + json_response = response.json() + if json_response.get("num-found") == 1: + orcid_uri = json_response.get("result")[0].get("orcid-identifier", {}).get("uri") + log.info(f"Using found ORCID for {name}. Please double-check: {orcid_uri}") + return orcid_uri + else: + log.debug(f"No exact ORCID found for {name}. See {response.url}") + return None + else: + log.info(f"API request to ORCID unsuccessful. 
Status code: {response.status_code}") + return None diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 95ed5e5b6e..61fd6bc2d7 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -43,7 +43,7 @@ def __init__(self): self.schema_from_scratch = False self.no_prompts = False self.web_only = False - self.web_schema_build_url = "https://nf-co.re/pipeline_schema_builder" + self.web_schema_build_url = "https://oldsite.nf-co.re/pipeline_schema_builder" self.web_schema_build_web_url = None self.web_schema_build_api_url = None self.validation_plugin = None @@ -51,7 +51,7 @@ def __init__(self): self.defs_notation = None self.ignored_params = [] - # Update the validation plugin code everytime the schema gets changed + # Update the validation plugin code every time the schema gets changed def set_schema_filename(self, schema: str) -> None: self._schema_filename = schema self._update_validation_plugin_from_config() @@ -71,7 +71,7 @@ def _update_validation_plugin_from_config(self) -> None: else: conf = nf_core.utils.fetch_wf_config(Path(self.pipeline_dir)) - plugins = str(conf.get("plugins", "")).strip('"').strip("'").strip(" ").split(",") + plugins = str(conf.get("plugins", "")).strip("'\"").strip(" ").split(",") plugin_found = False for plugin_instance in plugins: if "nf-schema" in plugin_instance: @@ -96,11 +96,18 @@ def _update_validation_plugin_from_config(self) -> None: conf.get("validation.help.shortParameter", "help"), conf.get("validation.help.fullParameter", "helpFull"), conf.get("validation.help.showHiddenParameter", "showHidden"), + "trace_report_suffix", # report suffix should be ignored by default as it is a Java Date object ] # Help parameter should be ignored by default - ignored_params_config = conf.get("validation", {}).get("defaultIgnoreParams", []) + ignored_params_config_str = conf.get("validation.defaultIgnoreParams", "") + ignored_params_config = [ + item.strip().strip("'") for item in ignored_params_config_str[1:-1].split(",") + ] # Extract list elements and remove whitespace + if len(ignored_params_config) > 0: + log.debug(f"Ignoring parameters from config: {ignored_params_config}") ignored_params.extend(ignored_params_config) self.ignored_params = ignored_params + log.debug(f"Ignoring parameters: {self.ignored_params}") self.schema_draft = "https://json-schema.org/draft/2020-12/schema" else: @@ -118,6 +125,7 @@ def get_schema_path( # Supplied path exists - assume a local pipeline directory or schema if path.exists(): log.debug(f"Path exists: {path}. 
Assuming local pipeline directory or schema") + local_only = True if revision is not None: log.warning(f"Local workflow supplied, ignoring revision '{revision}'") if path.is_dir(): @@ -373,7 +381,7 @@ def validate_config_default_parameter(self, param, schema_param, config_default) # If we have a default in the schema, check it matches the config if "default" in schema_param and ( (schema_param["type"] == "boolean" and str(config_default).lower() != str(schema_param["default"]).lower()) - and (str(schema_param["default"]) != str(config_default).strip('"').strip("'")) + and (str(schema_param["default"]) != str(config_default).strip("'\"")) ): # Check that we are not deferring the execution of this parameter in the schema default with squiggly brakcets if schema_param["type"] != "string" or "{" not in schema_param["default"]: @@ -950,6 +958,7 @@ def launch_web_builder(self): """ Send pipeline schema to web builder and wait for response """ + content = { "post_content": "json_schema", "api": "true", @@ -958,12 +967,13 @@ def launch_web_builder(self): "schema": json.dumps(self.schema), } web_response = nf_core.utils.poll_nfcore_web_api(self.web_schema_build_url, content) + try: if "api_url" not in web_response: raise AssertionError('"api_url" not in web_response') if "web_url" not in web_response: raise AssertionError('"web_url" not in web_response') - # DO NOT FIX THIS TYPO. Needs to stay in sync with the website. Maintaining for backwards compatability. + # DO NOT FIX THIS TYPO. Needs to stay in sync with the website. Maintaining for backwards compatibility. if web_response["status"] != "recieved": raise AssertionError( f'web_response["status"] should be "recieved", but it is "{web_response["status"]}"' diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index fced35dc20..781b4f5f00 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -6,7 +6,7 @@ import re import shutil from pathlib import Path -from typing import Dict, Optional, Union +from typing import Any, Dict, Optional, Tuple, Union import git import questionary @@ -21,6 +21,7 @@ import nf_core.pipelines.create.create import nf_core.pipelines.list import nf_core.utils +from nf_core.pipelines.lint_utils import dump_yaml_with_prettier log = logging.getLogger(__name__) @@ -104,7 +105,7 @@ def __init__( with open(template_yaml_path) as f: self.config_yml.template = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: - yaml.safe_dump(self.config_yml.model_dump(), fh) + yaml.safe_dump(self.config_yml.model_dump(exclude_none=True), fh) log.info(f"Saved pipeline creation settings to '{self.config_yml_path}'") raise SystemExit( f"Please commit your changes and delete the {template_yaml_path} file. Then run the sync command again." 
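Looking back at the `get_orcid` helper added in `nf_core/pipelines/rocrate.py` above, a compact sketch of the query it performs against the public ORCID API (example name; this is a live network call):

```python
import requests

name = "Ada Lovelace"  # example input, not from the diff

response = requests.get(
    "https://pub.orcid.org/v3.0/search/",
    params={"q": f'family-name:"{name.split()[-1]}" AND given-names:"{name.split()[0]}"'},
    headers={"Accept": "application/json"},
)
# Accept the ORCID only when the search is unambiguous (exactly one hit).
if response.status_code == 200 and response.json().get("num-found") == 1:
    print(response.json()["result"][0]["orcid-identifier"]["uri"])
```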
@@ -119,7 +120,7 @@ def __init__( requests.auth.HTTPBasicAuth(self.gh_username, os.environ["GITHUB_AUTH_TOKEN"]) ) - def sync(self): + def sync(self) -> None: """Find workflow attributes, create a new template pipeline on TEMPLATE""" # Clear requests_cache so that we don't get stale API responses @@ -270,22 +271,27 @@ def make_template_pipeline(self): self.config_yml.template.force = True with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml.model_dump(), config_path) + yaml.safe_dump(self.config_yml.model_dump(exclude_none=True), config_path) try: - nf_core.pipelines.create.create.PipelineCreate( + pipeline_create_obj = nf_core.pipelines.create.create.PipelineCreate( outdir=str(self.pipeline_dir), from_config_file=True, no_git=True, force=True, - ).init_pipeline() + ) + pipeline_create_obj.init_pipeline() # set force to false to avoid overwriting files in the future if self.config_yml.template is not None: + self.config_yml.template = pipeline_create_obj.config # Set force to false in config to avoid overwriting existing files self.config_yml.template.force = False - with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml.model_dump(), config_path) + # Set outdir as the current directory to avoid local info leaking + self.config_yml.template.outdir = "." + # Update nf-core version + self.config_yml.nf_core_version = nf_core.__version__ + dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump(exclude_none=True)) except Exception as err: # Reset to where you were to prevent git getting messed up. @@ -410,12 +416,8 @@ def close_open_template_merge_prs(self): list_prs_url = f"https://api.github.com/repos/{self.gh_repo}/pulls" with self.gh_api.cache_disabled(): list_prs_request = self.gh_api.get(list_prs_url) - try: - list_prs_json = json.loads(list_prs_request.content) - list_prs_pp = json.dumps(list_prs_json, indent=4) - except Exception: - list_prs_json = list_prs_request.content - list_prs_pp = list_prs_request.content + + list_prs_json, list_prs_pp = self._parse_json_response(list_prs_request) log.debug(f"GitHub API listing existing PRs:\n{list_prs_url}\n{list_prs_pp}") if list_prs_request.status_code != 200: @@ -456,12 +458,8 @@ def close_open_pr(self, pr) -> bool: # Update the PR status to be closed with self.gh_api.cache_disabled(): pr_request = self.gh_api.patch(url=pr["url"], data=json.dumps({"state": "closed"})) - try: - pr_request_json = json.loads(pr_request.content) - pr_request_pp = json.dumps(pr_request_json, indent=4) - except Exception: - pr_request_json = pr_request.content - pr_request_pp = pr_request.content + + pr_request_json, pr_request_pp = self._parse_json_response(pr_request) # PR update worked if pr_request.status_code == 200: @@ -475,6 +473,22 @@ def close_open_pr(self, pr) -> bool: log.warning(f"Could not close PR ('{pr_request.status_code}'):\n{pr['url']}\n{pr_request_pp}") return False + @staticmethod + def _parse_json_response(response) -> Tuple[Any, str]: + """Helper method to parse JSON response and create pretty-printed string. + + Args: + response: requests.Response object + + Returns: + Tuple of (parsed_json, pretty_printed_str) + """ + try: + json_data = json.loads(response.content) + return json_data, json.dumps(json_data, indent=4) + except Exception: + return response.content, str(response.content) + def reset_target_dir(self): """ Reset the target pipeline directory. Check out the original branch.
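Note on the hunk above: the new `_parse_json_response` helper centralises the try/except JSON handling that `close_open_template_merge_prs` and `close_open_pr` previously duplicated. A minimal sketch of its behaviour, assuming the enclosing class in `nf_core/pipelines/sync.py` is `PipelineSync`; the `FakeResponse` stand-in for `requests.Response` is hypothetical and only exists for illustration:

    from nf_core.pipelines.sync import PipelineSync

    class FakeResponse:
        # Hypothetical stand-in for requests.Response; only .content is used by the helper
        def __init__(self, content: bytes) -> None:
            self.content = content

    # Valid JSON is returned parsed, alongside an indented pretty-print for logging
    data, pretty = PipelineSync._parse_json_response(FakeResponse(b'{"state": "closed"}'))
    assert data == {"state": "closed"}

    # Non-JSON (e.g. an HTML error page) falls back to the raw bytes instead of raising
    raw, pretty = PipelineSync._parse_json_response(FakeResponse(b"<html>502 Bad Gateway</html>"))
    assert raw == b"<html>502 Bad Gateway</html>"

Either way the caller gets a two-element tuple, so debug logging can always interpolate the second element regardless of what the GitHub API returned.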
diff --git a/nf_core/subworkflows/__init__.py b/nf_core/subworkflows/__init__.py index 88e8a09388..8e3c85a271 100644 --- a/nf_core/subworkflows/__init__.py +++ b/nf_core/subworkflows/__init__.py @@ -3,5 +3,6 @@ from .install import SubworkflowInstall from .lint import SubworkflowLint from .list import SubworkflowList +from .patch import SubworkflowPatch from .remove import SubworkflowRemove from .update import SubworkflowUpdate diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index b366ddfb51..cedae62f11 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -11,11 +11,12 @@ import questionary import rich +import ruamel.yaml import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult -from nf_core.pipelines.lint_utils import console +from nf_core.pipelines.lint_utils import console, run_prettier_on_file log = logging.getLogger(__name__) @@ -45,6 +46,7 @@ def __init__( self, directory, fail_warned=False, + fix=False, remote_url=None, branch=None, no_pull=False, @@ -55,6 +57,7 @@ component_type="subworkflows", directory=directory, fail_warned=fail_warned, + fix=fix, remote_url=remote_url, branch=branch, no_pull=no_pull, @@ -214,6 +217,10 @@ def lint_subworkflow(self, swf, progress_bar, registry, local=False): # Otherwise run all the lint tests else: + # Update meta.yml file if requested + if self.fix: + self.update_meta_yml_file(swf) + if self.repo_type == "pipeline" and self.modules_json: # Set correct sha version = self.modules_json.get_subworkflow_version(swf.component_name, swf.repo_url, swf.org) @@ -230,3 +237,56 @@ self.failed += warned self.failed += [LintResult(swf, *s) for s in swf.failed] + + def update_meta_yml_file(self, swf): + """ + Update the meta.yml file with the correct inputs and outputs + """ + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.indent(mapping=2, sequence=2, offset=0) + + # Read meta.yml + with open(swf.meta_yml) as fh: + meta_yaml = yaml.load(fh) + meta_yaml_corrected = meta_yaml.copy() + # Obtain inputs and outputs from main.nf + swf.get_inputs_from_main_nf() + swf.get_outputs_from_main_nf() + + # Compare inputs and add them if missing + if "input" in meta_yaml: + # Delete inputs from meta.yml which are not present in main.nf + meta_yaml_corrected["input"] = [ + input for input in meta_yaml["input"] if list(input.keys())[0] in swf.inputs + ] + # Obtain inputs from main.nf missing in meta.yml + inputs_correct = [ + list(input.keys())[0] for input in meta_yaml_corrected["input"] if list(input.keys())[0] in swf.inputs + ] + inputs_missing = [input for input in swf.inputs if input not in inputs_correct] + # Add missing inputs to meta.yml + for missing_input in inputs_missing: + meta_yaml_corrected["input"].append({missing_input: {"description": ""}}) + + if "output" in meta_yaml: + # Delete outputs from meta.yml which are not present in main.nf + meta_yaml_corrected["output"] = [ + output for output in meta_yaml["output"] if list(output.keys())[0] in swf.outputs + ] + # Obtain outputs from main.nf missing in meta.yml + outputs_correct = [ + list(output.keys())[0] + for output in meta_yaml_corrected["output"] + if list(output.keys())[0] in swf.outputs + ] + outputs_missing = [output for output in swf.outputs if output not in outputs_correct] + # Add missing outputs to meta.yml + for missing_output in
outputs_missing: + meta_yaml_corrected["output"].append({missing_output: {"description": ""}}) + + # Write corrected meta.yml to file + with open(swf.meta_yml, "w") as fh: + log.info(f"Updating {swf.meta_yml}") + yaml.dump(meta_yaml_corrected, fh) + run_prettier_on_file(fh.name) diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py index 9c96df7563..91242e0869 100644 --- a/nf_core/subworkflows/lint/meta_yml.py +++ b/nf_core/subworkflows/lint/meta_yml.py @@ -1,4 +1,5 @@ import json +import logging from pathlib import Path import jsonschema.validators @@ -6,6 +7,8 @@ import nf_core.components.components_utils +log = logging.getLogger(__name__) + def meta_yml(subworkflow_lint_object, subworkflow): """ @@ -65,6 +68,8 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.passed.append(("meta_input", f"`{input}` specified", subworkflow.meta_yml)) else: subworkflow.failed.append(("meta_input", f"`{input}` missing in `meta.yml`", subworkflow.meta_yml)) + else: + log.debug(f"No inputs specified in subworkflow `main.nf`: {subworkflow.component_name}") if "output" in meta_yaml: meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] @@ -75,6 +80,8 @@ def meta_yml(subworkflow_lint_object, subworkflow): subworkflow.failed.append( ("meta_output", f"`{output}` missing in `meta.yml`", subworkflow.meta_yml) ) + else: + log.debug(f"No outputs specified in subworkflow `main.nf`: {subworkflow.component_name}") # confirm that the name matches the process name in main.nf if meta_yaml["name"].upper() == subworkflow.workflow_name: diff --git a/nf_core/subworkflows/lint/subworkflow_changes.py b/nf_core/subworkflows/lint/subworkflow_changes.py index a9c9616a21..cf0fd7211c 100644 --- a/nf_core/subworkflows/lint/subworkflow_changes.py +++ b/nf_core/subworkflows/lint/subworkflow_changes.py @@ -2,9 +2,12 @@ Check whether the content of a subworkflow has changed compared to the original repository """ +import shutil +import tempfile from pathlib import Path import nf_core.modules.modules_repo +from nf_core.components.components_differ import ComponentsDiffer def subworkflow_changes(subworkflow_lint_object, subworkflow): @@ -20,7 +23,29 @@ def subworkflow_changes(subworkflow_lint_object, subworkflow): Only runs when linting a pipeline, not the modules repository """ - tempdir = subworkflow.component_dir + if subworkflow.is_patched: + # If the subworkflow is patched, we need to apply + # the patch in reverse before comparing with the remote + tempdir_parent = Path(tempfile.mkdtemp()) + tempdir = tempdir_parent / "tmp_subworkflow_dir" + shutil.copytree(subworkflow.component_dir, tempdir) + try: + new_lines = ComponentsDiffer.try_apply_patch( + subworkflow.component_type, + subworkflow.component_name, + subworkflow.org, + subworkflow.patch_path, + tempdir, + reverse=True, + ) + for file, lines in new_lines.items(): + with open(tempdir / file, "w") as fh: + fh.writelines(lines) + except LookupError: + # This error is already reported by subworkflow_patch, so just return + return + else: + tempdir = subworkflow.component_dir subworkflow.branch = subworkflow_lint_object.modules_json.get_component_branch( "subworkflows", subworkflow.component_name, subworkflow.repo_url, subworkflow.org ) diff --git a/nf_core/subworkflows/patch.py b/nf_core/subworkflows/patch.py new file mode 100644 index 0000000000..3c8b3d5e4d --- /dev/null +++ b/nf_core/subworkflows/patch.py @@ -0,0 +1,10 @@ +import logging + +from nf_core.components.patch import ComponentPatch + +log = 
logging.getLogger(__name__) + + +class SubworkflowPatch(ComponentPatch): + def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=False): + super().__init__(pipeline_dir, "subworkflows", remote_url, branch, no_pull, installed_by) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index e2a76ccaeb..dd61b72a2b 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -395,8 +395,16 @@ def get_component_git_log( old_component_path = Path("modules", component_name) commits_old_iter = self.repo.iter_commits(max_count=depth, paths=old_component_path) - commits_old = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_old_iter] - commits_new = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_new_iter] + try: + commits_old = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_old_iter] + commits_new = [{"git_sha": commit.hexsha, "trunc_message": commit.message} for commit in commits_new_iter] + except git.GitCommandError as e: + log.error( + f"Git error: {e}\n" + "To solve this, you can try to remove the cloned repository and run the command again.\n" + f"This repository is typically found at `{self.local_repo_dir}`" + ) + raise UserWarning commits = iter(commits_new + commits_old) return commits diff --git a/nf_core/utils.py b/nf_core/utils.py index 663efb6b46..30b0743493 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -5,6 +5,7 @@ import concurrent.futures import datetime import errno +import fnmatch import hashlib import io import json @@ -19,7 +20,7 @@ import time from contextlib import contextmanager from pathlib import Path -from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Literal, Optional, Tuple, Union import git import prompt_toolkit.styles @@ -36,6 +37,9 @@ import nf_core +if TYPE_CHECKING: + from nf_core.pipelines.schema import PipelineSchema + log = logging.getLogger(__name__) # ASCII nf-core logo @@ -52,14 +56,29 @@ [ ("qmark", "fg:ansiblue bold"), # token in front of the question ("question", "bold"), # question text - ("answer", "fg:ansigreen nobold bg:"), # submitted answer text behind the question - ("pointer", "fg:ansiyellow bold"), # pointer used in select and checkbox prompts - ("highlighted", "fg:ansiblue bold"), # pointed-at choice in select and checkbox prompts - ("selected", "fg:ansiyellow noreverse bold"), # style for a selected item of a checkbox + ( + "answer", + "fg:ansigreen nobold bg:", + ), # submitted answer text behind the question + ( + "pointer", + "fg:ansiyellow bold", + ), # pointer used in select and checkbox prompts + ( + "highlighted", + "fg:ansiblue bold", + ), # pointed-at choice in select and checkbox prompts + ( + "selected", + "fg:ansiyellow noreverse bold", + ), # style for a selected item of a checkbox ("separator", "fg:ansiblack"), # separator in lists ("instruction", ""), # user instructions for select, rawselect, checkbox ("text", ""), # plain text - ("disabled", "fg:gray italic"), # disabled choices for select and checkbox prompts + ( + "disabled", + "fg:gray italic", + ), # disabled choices for select and checkbox prompts ("choice-default", "fg:ansiblack"), ("choice-default-changed", "fg:ansiyellow"), ("choice-required", "fg:ansired"), @@ -79,7 +98,11 @@ def fetch_remote_version(source_url): return remote_version -def check_if_outdated(current_version=None, remote_version=None,
source_url="https://nf-co.re/tools_version"): +def check_if_outdated( + current_version=None, + remote_version=None, + source_url="https://nf-co.re/tools_version", +): """ Check if the current version of nf-core is outdated """ @@ -146,11 +169,12 @@ def __init__(self, wf_path: Path) -> None: self.wf_path = Path(wf_path) self.pipeline_name: Optional[str] = None self.pipeline_prefix: Optional[str] = None - self.schema_obj: Optional[Dict] = None + self.schema_obj: Optional[PipelineSchema] = None + self.repo: Optional[git.Repo] = None try: - repo = git.Repo(self.wf_path) - self.git_sha = repo.head.object.hexsha + self.repo = git.Repo(self.wf_path) + self.git_sha = self.repo.head.object.hexsha except Exception as e: log.debug(f"Could not find git hash for pipeline: {self.wf_path}. {e}") @@ -254,7 +278,7 @@ def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict: """ log.debug(f"Got '{wf_path}' as path") - + wf_path = Path(wf_path) config = {} cache_fn = None cache_basedir = None @@ -323,7 +347,7 @@ def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict: # If we can, save a cached copy # HINT: during testing phase (in test_download, for example) we don't want - # to save configuration copy in $HOME, otherwise the tests/test_download.py::DownloadTest::test_wf_use_local_configs + # to save configuration copy in $HOME, otherwise the tests/pipelines/test_download.py::DownloadTest::test_wf_use_local_configs # will fail after the first attempt. It's better to not save temporary data # in others folders than tmp when doing tests in general if cache_path and cache_config: @@ -414,7 +438,7 @@ def wait_cli_function(poll_func: Callable[[], bool], refresh_per_second: int = 2 refresh_per_second (int): Refresh this many times per second. Default: 20. Returns: - None. Just sits in an infite loop until the function returns True. + None. Just sits in an infinite loop until the function returns True. """ try: spinner = Spinner("dots2", "Use ctrl+c to stop waiting and force exit.") @@ -433,7 +457,7 @@ def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict: Takes argument api_url for URL - Expects API reponse to be valid JSON and contain a top-level 'status' key. + Expects API response to be valid JSON and contain a top-level 'status' key. 
""" # Run without requests_cache so that we get the updated statuses with requests_cache.disabled(): @@ -441,6 +465,7 @@ def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict: if post_data is None: response = requests.get(api_url, headers={"Cache-Control": "no-cache"}) else: + log.debug(f"requesting {api_url} with {post_data}") response = requests.post(url=api_url, data=post_data) except requests.exceptions.Timeout: raise AssertionError(f"URL timed out: {api_url}") @@ -526,7 +551,8 @@ def __call__(self, r): with open(gh_cli_config_fn) as fh: gh_cli_config = yaml.safe_load(fh) self.auth = requests.auth.HTTPBasicAuth( - gh_cli_config["github.com"]["user"], gh_cli_config["github.com"]["oauth_token"] + gh_cli_config["github.com"]["user"], + gh_cli_config["github.com"]["oauth_token"], ) self.auth_mode = f"gh CLI config: {gh_cli_config['github.com']['user']}" except Exception: @@ -607,11 +633,11 @@ def request_retry(self, url, post_data=None): while True: # GET request if post_data is None: - log.debug(f"Seding GET request to {url}") + log.debug(f"Sending GET request to {url}") r = self.get(url=url) # POST request else: - log.debug(f"Seding POST request to {url}") + log.debug(f"Sending POST request to {url}") r = self.post(url=url, json=post_data) # Failed but expected - try again @@ -717,12 +743,12 @@ def parse_anaconda_licence(anaconda_response, version=None): license = re.sub(r"GNU GENERAL PUBLIC LICENSE", "GPL", license, flags=re.IGNORECASE) license = license.replace("GPL-", "GPLv") license = re.sub(r"GPL\s*([\d\.]+)", r"GPL v\1", license) # Add v prefix to GPL version if none found - license = re.sub(r"GPL\s*v(\d).0", r"GPL v\1", license) # Remove superflous .0 from GPL version + license = re.sub(r"GPL\s*v(\d).0", r"GPL v\1", license) # Remove superfluous .0 from GPL version license = re.sub(r"GPL \(([^\)]+)\)", r"GPL \1", license) license = re.sub(r"GPL\s*v", "GPL v", license) # Normalise whitespace to one space between GPL and v license = re.sub(r"\s*(>=?)\s*(\d)", r" \1\2", license) # Normalise whitespace around >= GPL versions - license = license.replace("Clause", "clause") # BSD capitilisation - license = re.sub(r"-only$", "", license) # Remove superflous GPL "only" version suffixes + license = license.replace("Clause", "clause") # BSD capitalisation + license = re.sub(r"-only$", "", license) # Remove superfluous GPL "only" version suffixes clean_licences.append(license) return clean_licences @@ -794,12 +820,18 @@ def get_tag_date(tag_date): # Obtain version and build match = re.search(r"(?::)+([A-Za-z\d\-_.]+)", img["image_name"]) if match is not None: - all_docker[match.group(1)] = {"date": get_tag_date(img["updated"]), "image": img} + all_docker[match.group(1)] = { + "date": get_tag_date(img["updated"]), + "image": img, + } elif img["image_type"] == "Singularity": # Obtain version and build match = re.search(r"(?::)+([A-Za-z\d\-_.]+)", img["image_name"]) if match is not None: - all_singularity[match.group(1)] = {"date": get_tag_date(img["updated"]), "image": img} + all_singularity[match.group(1)] = { + "date": get_tag_date(img["updated"]), + "image": img, + } # Obtain common builds from Docker and Singularity images common_keys = list(all_docker.keys() & all_singularity.keys()) current_date = None @@ -929,13 +961,19 @@ def prompt_pipeline_release_branch( # Releases if len(wf_releases) > 0: for tag in map(lambda release: release.get("tag_name"), wf_releases): - tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")] + tag_display = 
[ + ("fg:ansiblue", f"{tag} "), + ("class:choice-default", "[release]"), + ] choices.append(questionary.Choice(title=tag_display, value=tag)) tag_set.append(str(tag)) # Branches for branch in wf_branches.keys(): - branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")] + branch_display = [ + ("fg:ansiyellow", f"{branch} "), + ("class:choice-default", "[branch]"), + ] choices.append(questionary.Choice(title=branch_display, value=branch)) tag_set.append(branch) @@ -966,7 +1004,8 @@ def validate(self, value): return True else: raise questionary.ValidationError( - message="Invalid remote cache index file", cursor_position=len(value.text) + message="Invalid remote cache index file", + cursor_position=len(value.text), ) else: return True @@ -996,7 +1035,13 @@ def get_repo_releases_branches(pipeline, wfs): pipeline = wf.full_name # Store releases and stop loop - wf_releases = list(sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True)) + wf_releases = list( + sorted( + wf.releases, + key=lambda k: k.get("published_at_timestamp", 0), + reverse=True, + ) + ) break # Arbitrary GitHub repo @@ -1016,7 +1061,13 @@ def get_repo_releases_branches(pipeline, wfs): raise AssertionError(f"Not able to find pipeline '{pipeline}'") except AttributeError: # Success! We have a list, which doesn't work with .get() which is looking for a dict key - wf_releases = list(sorted(rel_r.json(), key=lambda k: k.get("published_at_timestamp", 0), reverse=True)) + wf_releases = list( + sorted( + rel_r.json(), + key=lambda k: k.get("published_at_timestamp", 0), + reverse=True, + ) + ) # Get release tag commit hashes if len(wf_releases) > 0: @@ -1050,15 +1101,26 @@ def get_repo_releases_branches(pipeline, wfs): class NFCoreTemplateConfig(BaseModel): + """Template configuration schema""" + org: Optional[str] = None + """ Organisation name """ name: Optional[str] = None + """ Pipeline name """ description: Optional[str] = None + """ Pipeline description """ author: Optional[str] = None + """ Pipeline author """ version: Optional[str] = None + """ Pipeline version """ force: Optional[bool] = True + """ Force overwrite of existing files """ outdir: Optional[Union[str, Path]] = None + """ Output directory """ skip_features: Optional[list] = None + """ Skip features. See https://nf-co.re/docs/nf-core-tools/pipelines/create for a list of features. """ is_nfcore: Optional[bool] = None + """ Whether the pipeline is an nf-core pipeline. """ # convert outdir to str @field_validator("outdir") @@ -1077,17 +1139,121 @@ def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) -LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]] +class NFCoreYamlLintConfig(BaseModel): + """ + schema for linting config in `.nf-core.yml` should cover: + + .. 
code-block:: yaml + files_unchanged: + - .github/workflows/branch.yml + modules_config: False + modules_config: + - fastqc + # merge_markers: False + merge_markers: + - docs/my_pdf.pdf + nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + # multiqc_config: False + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + template_strings: False + template_strings: + - docs/my_pdf.pdf + nfcore_components: False + """ + + files_unchanged: Optional[Union[bool, List[str]]] = None + """ List of files that should not be changed """ + modules_config: Optional[Union[bool, List[str]]] = None + """ List of modules that should not be changed """ + merge_markers: Optional[Union[bool, List[str]]] = None + """ List of files that should not contain merge markers """ + nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = None + """ List of Nextflow config files that should not be changed """ + multiqc_config: Optional[Union[bool, List[str]]] = None + """ List of MultiQC config options that should not be changed """ + files_exist: Optional[Union[bool, List[str]]] = None + """ List of files that are allowed to not exist """ + template_strings: Optional[Union[bool, List[str]]] = None + """ List of files that can contain template strings """ + readme: Optional[Union[bool, List[str]]] = None + """ Lint the README.md file """ + nfcore_components: Optional[bool] = None + """ Lint all required files to use nf-core modules and subworkflows """ + actions_ci: Optional[bool] = None + """ Lint all required files to use GitHub Actions CI """ + actions_awstest: Optional[bool] = None + """ Lint all required files to run tests on AWS """ + actions_awsfulltest: Optional[bool] = None + """ Lint all required files to run full tests on AWS """ + pipeline_todos: Optional[bool] = None + """ Lint for TODO statements""" + plugin_includes: Optional[bool] = None + """ Lint for Nextflow plugin include statements """ + pipeline_name_conventions: Optional[bool] = None + """ Lint for pipeline name conventions """ + schema_lint: Optional[bool] = None + """ Lint nextflow_schema.json file""" + schema_params: Optional[bool] = None + """ Lint schema for all params """ + system_exit: Optional[bool] = None + """ Lint for System.exit calls in groovy/nextflow code """ + schema_description: Optional[bool] = None + """ Check that every parameter in the schema has a description.
""" + actions_schema_validation: Optional[bool] = None + """ Lint GitHub Action workflow files with schema""" + modules_json: Optional[bool] = None + """ Lint modules.json file """ + modules_structure: Optional[bool] = None + """ Lint modules structure """ + base_config: Optional[bool] = None + """ Lint base.config file """ + nfcore_yml: Optional[bool] = None + """ Lint nf-core.yml """ + version_consistency: Optional[bool] = None + """ Lint for version consistency """ + included_configs: Optional[bool] = None + """ Lint for included configs """ + + def __getitem__(self, item: str) -> Any: + return getattr(self, item) + + def get(self, item: str, default: Any = None) -> Any: + if getattr(self, item, default) is None: + return default + return getattr(self, item, default) + + def __setitem__(self, item: str, value: Any) -> None: + setattr(self, item, value) class NFCoreYamlConfig(BaseModel): - repository_type: str + """.nf-core.yml configuration file schema""" + + repository_type: Optional[Literal["pipeline", "modules"]] = None + """ Type of repository """ nf_core_version: Optional[str] = None + """ Version of nf-core/tools used to create/update the pipeline """ org_path: Optional[str] = None - lint: Optional[LintConfigType] = None + """ Path to the organisation's modules repository (used for modules repo_type only) """ + lint: Optional[NFCoreYamlLintConfig] = None + """ Pipeline linting configuration, see https://nf-co.re/docs/nf-core-tools/pipelines/lint#linting-config for examples and documentation """ template: Optional[NFCoreTemplateConfig] = None + """ Pipeline template configuration """ bump_version: Optional[Dict[str, bool]] = None + """ Disable bumping of the version for a module/subworkflow (when repository_type is modules). See https://nf-co.re/docs/nf-core-tools/modules/bump-versions for more information. """ update: Optional[Dict[str, Union[str, bool, Dict[str, Union[str, Dict[str, Union[str, bool]]]]]]] = None + """ Disable updating specific modules/subworkflows (when repository_type is pipeline). See https://nf-co.re/docs/nf-core-tools/modules/update for more information. 
""" def __getitem__(self, item: str) -> Any: return getattr(self, item) @@ -1095,6 +1261,26 @@ def __getitem__(self, item: str) -> Any: def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) + def __setitem__(self, item: str, value: Any) -> None: + setattr(self, item, value) + + def model_dump(self, **kwargs) -> Dict[str, Any]: + # Get the initial data + config = super().model_dump(**kwargs) + + if self.repository_type == "modules": + # Fields to exclude for modules + fields_to_exclude = ["template", "update"] + else: # pipeline + # Fields to exclude for pipeline + fields_to_exclude = ["bump_version", "org_path"] + + # Remove the fields based on repository_type + for field in fields_to_exclude: + config.pop(field, None) + + return config + def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path], Optional[NFCoreYamlConfig]]: """ @@ -1133,9 +1319,38 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] except ValidationError as e: error_message = f"Config file '{config_fn}' is invalid" for error in e.errors(): - error_message += f"\n{error['loc'][0]}: {error['msg']}" + error_message += f"\n{error['loc'][0]}: {error['msg']}\ninput: {error['input']}" raise AssertionError(error_message) + wf_config = fetch_wf_config(Path(directory)) + if nf_core_yaml_config["repository_type"] == "pipeline" and wf_config: + # Retrieve information if template from config file is empty + template = tools_config.get("template") + config_template_keys = template.keys() if template is not None else [] + if nf_core_yaml_config.template is None: + # The .nf-core.yml file did not contain template information + nf_core_yaml_config.template = NFCoreTemplateConfig( + org="nf-core", + name=wf_config["manifest.name"].strip("'\"").split("/")[-1], + description=wf_config["manifest.description"].strip("'\""), + author=wf_config["manifest.author"].strip("'\""), + version=wf_config["manifest.version"].strip("'\""), + outdir=str(directory), + is_nfcore=True, + ) + elif "prefix" in config_template_keys or "skip" in config_template_keys: + # The .nf-core.yml file contained the old prefix or skip keys + nf_core_yaml_config.template = NFCoreTemplateConfig( + org=tools_config["template"].get("prefix", tools_config["template"].get("org", "nf-core")), + name=tools_config["template"].get("name", wf_config["manifest.name"].strip("'\"").split("/")[-1]), + description=tools_config["template"].get("description", wf_config["manifest.description"].strip("'\"")), + author=tools_config["template"].get("author", wf_config["manifest.author"].strip("'\"")), + version=tools_config["template"].get("version", wf_config["manifest.version"].strip("'\"")), + outdir=tools_config["template"].get("outdir", str(directory)), + skip_features=tools_config["template"].get("skip", tools_config["template"].get("skip_features")), + is_nfcore=tools_config["template"].get("prefix", tools_config["template"].get("org")) == "nf-core", + ) + log.debug("Using config file: %s", config_fn) return config_fn, nf_core_yaml_config @@ -1158,7 +1373,7 @@ def get_first_available_path(directory: Union[Path, str], paths: List[str]) -> U return None -def sort_dictionary(d): +def sort_dictionary(d: Dict) -> Dict: """Sorts a nested dictionary recursively""" result = {} for k, v in sorted(d.items()): @@ -1299,3 +1514,21 @@ def set_wd(path: Path) -> Generator[None, None, None]: yield finally: os.chdir(start_wd) + + +def get_wf_files(wf_path: Path): + """Return a list of all files in a directory 
(ignores files matched by .gitignore)""" + + wf_files = [] + + with open(Path(wf_path, ".gitignore")) as f: + lines = f.read().splitlines() + ignore = [line for line in lines if line and not line.startswith("#")] + + for path in Path(wf_path).rglob("*"): + if any(fnmatch.fnmatch(str(path), pattern) for pattern in ignore): + continue + if path.is_file(): + wf_files.append(str(path)) + + return wf_files diff --git a/requirements-dev.txt b/requirements-dev.txt index aa43ee3fe3..aab9b1e5d7 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ responses ruff Sphinx sphinx-rtd-theme -textual-dev==1.5.1 +textual-dev==1.6.1 types-PyYAML types-requests types-jsonschema @@ -16,7 +16,7 @@ types-requests types-setuptools typing_extensions >=4.0.0 pytest-asyncio -pytest-textual-snapshot==0.4.0 +pytest-textual-snapshot==1.0.0 pytest-workflow>=2.0.0 pytest>=8.0.0 ruff diff --git a/requirements.txt b/requirements.txt index eba6460f03..51259938a8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ packaging pillow pdiff pre-commit -prompt_toolkit<=3.0.36 +prompt_toolkit<=3.0.48 pydantic>=2.2.1 pyyaml questionary>=2.0.1 @@ -18,7 +18,10 @@ requests requests_cache rich-click==1.8.* rich>=13.3.1 +rocrate +repo2rocrate tabulate textual==0.71.0 trogon pdiff +ruamel.yaml diff --git a/setup.py b/setup.py index 45df29b8bc..11b3022494 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "3.0.0dev" +version = "3.0.3dev" with open("README.md") as f: readme = f.read() diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_http.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_http.nf new file mode 100644 index 0000000000..20c7075481 --- /dev/null +++ b/tests/data/mock_module_containers/modules/mock_seqera_container_http.nf @@ -0,0 +1,11 @@ +process CAT_FASTQ { + label 'process_single' + + conda "${moduleDir}/environment.yml" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data' : + 'community.wave.seqera.io/library/coreutils:9.5--ae99c88a9b28c264' }" + + // truncated + +} diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf new file mode 100644 index 0000000000..8278ac7917 --- /dev/null +++ b/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf @@ -0,0 +1,11 @@ +process UMI_TRANSFER { + label 'process_single' + + conda "${moduleDir}/environment.yml" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6' : + 'community.wave.seqera.io/library/umi-transfer:1.0.0--d30e8812ea280fa1' }" + + // truncated + +} diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf new file mode 100644 index 0000000000..234ca04a45 --- /dev/null +++ b/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf @@ -0,0 +1,11 @@ +process UMI_TRANSFER_MULLED { + label 'process_single' + + conda "${moduleDir}/environment.yml" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+ 'oras://community.wave.seqera.io/library/umi-transfer_umicollapse:796a995ff53da9e3' : + 'community.wave.seqera.io/library/umi-transfer_umicollapse:3298d4f1b49e33bd' }" + + // truncated + +} diff --git a/tests/modules/test_lint.py b/tests/modules/test_lint.py index 51c814b88c..5372807987 100644 --- a/tests/modules/test_lint.py +++ b/tests/modules/test_lint.py @@ -274,7 +274,7 @@ def test_modules_lint_patched_modules(self): all_modules=True, ) - assert len(module_lint.failed) == 1 + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 @@ -305,6 +305,14 @@ def test_modules_lint_check_url(self): len(mocked_ModuleLint.failed) == failed ), f"{test}: Expected {failed} FAIL, got {len(mocked_ModuleLint.failed)}." + def test_modules_lint_update_meta_yml(self): + """update the meta.yml of a module""" + module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules, fix=True) + module_lint.lint(print_results=False, module="bpipe/test") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + def test_modules_lint_snapshot_file(self): """Test linting a module with a snapshot file""" module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) @@ -432,7 +440,7 @@ def test_modules_environment_yml_file_sorted_incorrectly(self): ) as fh: yaml_content = yaml.safe_load(fh) # Add a new dependency to the environment.yml file and reverse the order - yaml_content["dependencies"].append("z") + yaml_content["dependencies"].append("z=0.0.0") yaml_content["dependencies"].reverse() yaml_content = yaml.dump(yaml_content) with open( @@ -513,25 +521,6 @@ def test_modules_meta_yml_incorrect_licence_field(self): assert len(module_lint.warned) >= 0 assert module_lint.failed[0].lint_test == "meta_yml_valid" - def test_modules_meta_yml_input_mismatch(self): - """Test linting a module with an extra entry in input fields in meta.yml compared to module.input""" - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: - main_nf = fh.read() - main_nf_new = main_nf.replace("path bam", "path bai") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf_new) - module_lint = nf_core.modules.lint.ModuleLint(directory=self.nfcore_modules) - module_lint.lint(print_results=False, module="bpipe/test") - with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: - fh.write(main_nf) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" - assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) == 2, f"Linting warning with {[x.__dict__ for x in module_lint.warned]}" - lint_tests = [x.lint_test for x in module_lint.warned] - # check that it is there twice: - assert lint_tests.count("meta_input_meta_only") == 1 - assert lint_tests.count("meta_input_main_only") == 1 - def test_modules_meta_yml_output_mismatch(self): """Test linting a module with an extra entry in output fields in meta.yml compared to module.output""" with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: @@ -543,13 +532,9 @@ def test_modules_meta_yml_output_mismatch(self): module_lint.lint(print_results=False, module="bpipe/test") with open(Path(self.nfcore_modules, 
"modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: fh.write(main_nf) - assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) >= 0 - assert len(module_lint.warned) == 2 - lint_tests = [x.lint_test for x in module_lint.warned] - # check that it is there twice: - assert lint_tests.count("meta_output_meta_only") == 1 - assert lint_tests.count("meta_output_main_only") == 1 + assert "Module `meta.yml` does not match `main.nf`" in module_lint.failed[0].message def test_modules_meta_yml_incorrect_name(self): """Test linting a module with an incorrect name in meta.yml""" diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 0368c146c4..325a8073b7 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -175,14 +175,17 @@ def test_mod_json_repo_present(self): assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True assert mod_json_obj.repo_present("INVALID_REPO") is False - def test_mod_json_module_present(self): - """Tests the module_present function""" + def test_mod_json_component_present(self): + """Tests the component_present function""" mod_json_obj = ModulesJson(self.pipeline_dir) - assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True - assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False - assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False - assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False + assert mod_json_obj.component_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME, "modules") is True + assert ( + mod_json_obj.component_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME, "modules") + is False + ) + assert mod_json_obj.component_present("fastqc", "INVALID_REPO", "INVALID_DIR", "modules") is False + assert mod_json_obj.component_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR", "modules") is False def test_mod_json_get_module_version(self): """Test the get_module_version function""" diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index c3eb94d374..f608278618 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -21,10 +21,10 @@ testing if the update commands works correctly with patch files """ -ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" -CORRECT_SHA = "1dff30bfca2d98eb7ac7b09269a15e822451d99f" -SUCCEED_SHA = "ba15c20c032c549d77c5773659f19c2927daf48e" -FAIL_SHA = "67b642d4471c4005220a342cad3818d5ba2b5a73" +ORG_SHA = "3dc7c14d29af40f1a0871a675364e437559d97a8" +CORRECT_SHA = "63e780200600e340365b669f9c673b670764c569" +SUCCEED_SHA = "0d0515c3f11266e1314e129bec3e308f804c8dc7" +FAIL_SHA = "cb64a5c1ef85619b89ab99dec2e9097fe84e1dc8" BISMARK_ALIGN = "bismark/align" REPO_NAME = "nf-core-test" PATCH_BRANCH = "patch-tester" @@ -76,11 +76,11 @@ def test_create_patch_no_change(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Check that no patch file has been added to the directory - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} + assert not (module_path / "bismark-align.diff").exists() # Check the 'modules.json' contains no patch file for the module modules_json_obj = 
nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) is None def test_create_patch_change(self): """Test creating a patch when there is a change to the module""" @@ -94,11 +94,11 @@ def test_create_patch_change(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -127,11 +127,11 @@ def test_create_patch_try_apply_successful(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -153,11 +153,11 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -195,11 +195,11 @@ def test_create_patch_try_apply_failed(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -234,11 +234,11 @@ def test_create_patch_update_success(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert 
modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -254,13 +254,13 @@ def test_create_patch_update_success(self): assert update_obj.update(BISMARK_ALIGN) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) + ), modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) # Check that the correct lines are in the patch file with open(module_path / patch_fn) as fh: @@ -295,11 +295,11 @@ def test_create_patch_update_fail(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -349,11 +349,11 @@ def test_remove_patch(self): # Check that a patch file with the correct name has been created patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -361,8 +361,8 @@ def test_remove_patch(self): mock_questionary.unsafe_ask.return_value = True patch_obj.remove(BISMARK_ALIGN) # Check that the diff file has been removed - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} + assert not (module_path / patch_fn).exists() # Check that the 'modules.json' entry has been removed modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) is None diff --git a/tests/pipelines/__snapshots__/test_create_app.ambr b/tests/pipelines/__snapshots__/test_create_app.ambr deleted file mode 100644 index 6e3009e18e..0000000000 --- a/tests/pipelines/__snapshots__/test_create_app.ambr +++ /dev/null @@ -1,3321 +0,0 @@ -# serializer version: 1 -# name: test_basic_details_custom - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - - GitHub organisationWorkflow name - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-corePipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - A short description of your pipeline. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - Name of the main author / authors - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Next  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_basic_details_nfcore - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - - GitHub organisationWorkflow name - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-core                                   Pipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - A short description of your pipeline. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - Name of the main author / authors - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Next  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_choose_type - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Choose pipeline type - - - - - Choose "nf-core" if:Choose "Custom" if: - - ● You want your pipeline to be part of the ● Your pipeline will never be part of  - nf-core communitynf-core - ● You think that there's an outside chance ● You want full control over all features  - that it ever could be part of nf-corethat are included from the template  - (including those that are mandatory for  - nf-core). 
- ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  nf-core  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Custom  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - What's the difference? - -   Choosing "nf-core" effectively pre-selects the following template features: - - ● GitHub Actions continuous-integration configuration files: - ▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) - ▪ Code formatting checks with Prettier - ▪ Auto-fix linting functionality using @nf-core-bot - ▪ Marking old issues as stale - ● Inclusion of shared nf-core configuration profiles - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_customisation_help - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use a GitHub Create a GitHub  Show help  - ▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - pipeline. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing▄▄ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Hide help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - - Nf-core pipelines are configured to use a copy of the most common reference  - genome files. - - By selecting this option, your pipeline will include a configuration file  - specifying the paths to these files. - - The required code to use these files will also be included in the template.  - When the pipeline user provides an appropriate genome key, the pipeline will - automatically download the required reference files. - ▅▅ - For more information about reference genomes in nf-core pipelines, see the  - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_final_details - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Final details - - - - - First version of the pipelinePath to the output directory where the  - pipeline will be created - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - 1.0.0dev.                                          
- ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Finish  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_github_details - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - -   Now that we have created a new pipeline locally, we can create a new GitHub repository and push    -   the code to it. - - - - - Your GitHub usernameYour GitHub personal access token▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - for login. Show  - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - GitHub username••••••••••••                   - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - The name of the organisation where the The name of the new GitHub repository - GitHub repo will be cretaed - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-core                               mypipeline                             - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - ⚠️ You can't create a repository directly in the nf-core organisation. - Please create the pipeline repo to an organisation where you have access or use your user  - account. A core-team member will be able to transfer the repo to nf-core once the development - has started. - - 💡 Your GitHub user account will be used by default if nf-core is given as the org name. - - - ▔▔▔▔▔▔▔▔Private - Select to make the new GitHub repo private. - ▁▁▁▁▁▁▁▁ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Create GitHub repo  Finish without creating a repo  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_github_exit_message - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - HowTo create a GitHub repository - - - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - -   If you would like to create the GitHub repository later, you can do it manually by following  -   these steps: - -  1. Create a new GitHub repository -  2. Add the remote to your local repository: - - - cd <pipeline_directory> - git remote add origin git@github.com:<username>/<repo_name>.git - - -  3. Push the code to the remote: - - - git push --all origin - - - 💡 Note the --all flag: this is needed to push all branches to the remote. 
- - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Close  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_github_question - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Create GitHub repository - - -   After creating the pipeline template locally, we can create a GitHub repository and push the  -   code to it. - -   Do you want to create a GitHub repository? - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Create GitHub repo  Finish without creating a repo  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_type_custom - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use a GitHub Create a GitHub  Show help  - ▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - pipeline. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github CI testsThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - actions for Continuous - Integration (CI)  - testing - ▃▃ - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Show help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most  - common reference  - genome files from  - iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add Github badgesThe README.md file of  Show help  - ▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - include GitHub badges - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Add configuration The pipeline will  Show help  - ▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - profiles containing  - custom parameters  - requried to run  - nf-core pipelines at  - different institutions - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use code lintersThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include code linters ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - and CI tests to lint  - your code: pre-commit, - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_type_nfcore - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Template features - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use reference genomesThe pipeline will be  Show help  - ▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - copy of the most common - reference genome files  - from 
iGenomes - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use multiqcThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include the MultiQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - module which generates  - an HTML report for  - quality control. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use fastqcThe pipeline will  Show help  - ▁▁▁▁▁▁▁▁include the FastQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - module which performs  - quality control  - analysis of input FASTQ - files. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -         Use nf-schemaUse the nf-schema  Show help  - ▁▁▁▁▁▁▁▁Nextflow plugin for ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - this pipeline. - - - - - - - - - - - - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Continue  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_type_nfcore_validation - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - - - Basic details - - - - - GitHub organisationWorkflow name - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - nf-core                                   Pipeline Name - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Must be lowercase without  - punctuation. - - - - A short description of your pipeline. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Description - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - - - Name of the main author / authors - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Author(s) - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - Value error, Cannot be left empty. - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Back  Next  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- -# name: test_welcome - ''' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nf-core create - - - - - - - - - - nf-core create — Create a new pipeline with the nf-core pipeline template - -                                           ,--./,-. -           ___     __   __   __   ___     /,-._.--~\  -     |\ | |__  __ /  ` /  \ |__) |__         }  { -     | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                           `._,._,' - - - - Welcome to the nf-core pipeline creation wizard - -   This app will help you create a new Nextflow pipeline from the nf-core/tools pipeline template. - -   The template helps anyone benefit from nf-core best practices, and is a requirement for nf-core    -   pipelines. - - 💡 If you want to add a pipeline to nf-core, please join on Slack and discuss your plans with - the community as early as possible; ideally before you start on your pipeline! 
See the  - nf-core guidelines and the #new-pipelines Slack channel for more information. - - - ▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -  Let's go!  - ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - - - - - - - - - - - - - - - - - - - - - - - -  d Toggle dark mode  q Quit  - - - - - ''' -# --- diff --git a/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg new file mode 100644 index 0000000000..f327dac799 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_custom.svg @@ -0,0 +1,271 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Basic details + + + + +GitHub organisationWorkflow name + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +nf-corePipeline Name +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + +A short description of your pipeline. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Description +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + +Name of the main author / authors + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Author(s) +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Next  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg new file mode 100644 index 0000000000..6a4e424130 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_basic_details_nfcore.svg @@ -0,0 +1,274 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Basic details + + + + +GitHub organisationWorkflow name + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +nf-core                                   Pipeline Name +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + +A short description of your pipeline. 
+ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Description +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + +Name of the main author / authors + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Author(s) +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Next  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg b/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg new file mode 100644 index 0000000000..3eaceeb477 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_choose_type.svg @@ -0,0 +1,269 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Choose pipeline type + + + + +Choose "nf-core" if:Choose "Custom" if: + +● You want your pipeline to be part of the ● Your pipeline will never be part of  +nf-core communitynf-core +● You think that there's an outside chance ● You want full control over all features  +that it ever could be part of nf-corethat are included from the template  +(including those that are mandatory for  +nf-core). +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + nf-core  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Custom  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + +What's the difference? + +  Choosing "nf-core" effectively pre-selects the following template features: + +● GitHub Actions continuous-integration configuration files: +▪ Pipeline test runs: Small-scale (GitHub) and large-scale (AWS) +▪ Code formatting checks with Prettier +▪ Auto-fix linting functionality using @nf-core-bot +▪ Marking old issues as stale +● Inclusion of shared nf-core configuration profiles + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg new file mode 100644 index 0000000000..c34bd85230 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg @@ -0,0 +1,275 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Template features + + +▔▔▔▔▔▔▔▔ +        Toggle all features +▁▁▁▁▁▁▁▁ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use a GitHub Create a GitHub  Show help  +▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +pipeline. 
+ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github CI testsThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous▅▅ +Integration (CI)  +testing + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use reference genomesThe pipeline will be  Hide help  +▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +copy of the most  +common reference  +genome files from  +iGenomes + + +Nf-core pipelines are configured to use a copy of the most common reference  +genome files. + +By selecting this option, your pipeline will include a configuration file  +specifying the paths to these files. + +The required code to use these files will also be included in the template.  +When the pipeline user provides an appropriate genome key, the pipeline will +automatically download the required reference files. +▅▅ +For more information about reference genomes in nf-core pipelines, see the  + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github badgesThe README.md file of  Show help  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg b/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg new file mode 100644 index 0000000000..74c232f747 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_final_details.svg @@ -0,0 +1,269 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Final details + + + + +First version of the pipelinePath to the output directory where the  +pipeline will be created +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +1.0.0dev.                                          +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Finish  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg new file mode 100644 index 0000000000..03eeaab0d1 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_github_details.svg @@ -0,0 +1,276 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Create GitHub repository + +  Now that we have created a new pipeline locally, we can create a new GitHub repository and push    +  the code to it. + + + + +Your GitHub usernameYour GitHub personal access token▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +for login. 
Show  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +GitHub username••••••••••••                   +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + +The name of the organisation where the The name of the new GitHub repository +GitHub repo will be created +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +nf-core                               mypipeline                             +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + +⚠️ You can't create a repository directly in the nf-core organisation. +Please create the pipeline repo to an organisation where you have access or use your user  +account. A core-team member will be able to transfer the repo to nf-core once the development +has started. + +💡 Your GitHub user account will be used by default if nf-core is given as the org name. + + +▔▔▔▔▔▔▔▔Private +Select to make the new GitHub repo private. +▁▁▁▁▁▁▁▁ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Create GitHub repo  Finish without creating a repo  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg new file mode 100644 index 0000000000..1be8c63f10 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_github_exit_message.svg @@ -0,0 +1,272 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +HowTo create a GitHub repository + + + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\  +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +  If you would like to create the GitHub repository later, you can do it manually by following  +  these steps: + + 1. Create a new GitHub repository + 2. Add the remote to your local repository: + + +cd <pipeline_directory> +git remote add origin git@github.com:<username>/<repo_name>.git + + + 3. Push the code to the remote: + + +git push --all origin + + +💡 Note the --all flag: this is needed to push all branches to the remote. 
+ + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Close  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg b/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg new file mode 100644 index 0000000000..8aad414e62 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_github_question.svg @@ -0,0 +1,265 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Create GitHub repository + + +  After creating the pipeline template locally, we can create a GitHub repository and push the  +  code to it. + +  Do you want to create a GitHub repository? + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Create GitHub repo  Finish without creating a repo  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg new file mode 100644 index 0000000000..b8dea05604 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg @@ -0,0 +1,274 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Template features + + +▔▔▔▔▔▔▔▔ +        Toggle all features +▁▁▁▁▁▁▁▁ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use a GitHub Create a GitHub  Show help  +▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +pipeline. 
+ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github CI testsThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous +Integration (CI) ▇▇ +testing + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use reference genomesThe pipeline will be  Show help  +▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +copy of the most  +common reference  +genome files from  +iGenomes + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github badgesThe README.md file of  Show help  +▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +include GitHub badges + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add configuration The pipeline will  Show help  +▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +profiles containing  +custom parameters  +required to run  +nf-core pipelines at  +different institutions + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use code lintersThe pipeline will  Show help  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg new file mode 100644 index 0000000000..c18bb31b8e --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore.svg @@ -0,0 +1,272 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Template features + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use reference genomesThe pipeline will be  Show help  +▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +copy of the most common +reference genome files  +from iGenomes + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use multiqcThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include the MultiQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +module which generates  +an HTML report for  +quality control. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use fastqcThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include the FastQC ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +module which performs  +quality control  +analysis of input FASTQ +files. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use nf-schemaUse the nf-schema  Show help  +▁▁▁▁▁▁▁▁Nextflow plugin for ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +this pipeline. 
+ + + + + + + + + + + + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg new file mode 100644 index 0000000000..fd6f2532c8 --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_type_nfcore_validation.svg @@ -0,0 +1,273 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Basic details + + + + +GitHub organisationWorkflow name + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +nf-core                                   Pipeline Name +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +Value error, Must be lowercase without  +punctuation. + + + +A short description of your pipeline. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Description +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +Value error, Cannot be left empty. + + + +Name of the main author / authors + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +Author(s) +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +Value error, Cannot be left empty. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Next  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg b/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg new file mode 100644 index 0000000000..d9941b650d --- /dev/null +++ b/tests/pipelines/__snapshots__/test_create_app/test_welcome.svg @@ -0,0 +1,271 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + nf-core pipelines create + + + + + + + + + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\  +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + + + +Welcome to the nf-core pipeline creation wizard + +  This app will help you create a new Nextflow pipeline from the nf-core/tools pipeline template. + +  The template helps anyone benefit from nf-core best practices, and is a requirement for nf-core    +  pipelines. + +💡 If you want to add a pipeline to nf-core, please join on Slack and discuss your plans with +the community as early as possible; ideally before you start on your pipeline! 
See the  +nf-core guidelines and the #new-pipelines Slack channel for more information. + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Let's go!  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + + + + + + + + + + + + + + + + + + + + + + + + d Toggle dark mode  q Quit  a Toggle all  + + + diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py index 97dd346cdf..ebc529247e 100644 --- a/tests/pipelines/lint/test_files_exist.py +++ b/tests/pipelines/lint/test_files_exist.py @@ -1,5 +1,7 @@ from pathlib import Path +from ruamel.yaml import YAML + import nf_core.pipelines.lint from ..test_lint import TestLint @@ -9,17 +11,17 @@ class TestLintFilesExist(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) def test_files_exist_missing_config(self): """Lint test: critical files missing FAIL""" Path(self.new_pipeline, "CHANGELOG.md").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" + assert self.lint_obj._load() + self.lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert "File not found: `CHANGELOG.md`" in results["failed"] def test_files_exist_missing_main(self): @@ -27,31 +29,27 @@ def test_files_exist_missing_main(self): Path(self.new_pipeline, "main.nf").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert "File not found: `main.nf`" in results["warned"] def test_files_exist_deprecated_file(self): """Check whether deprecated file issues warning""" - nf = Path(self.new_pipeline, "parameters.settings.json") - nf.touch() + Path(self.new_pipeline, "parameters.settings.json").touch() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert results["failed"] == ["File must be removed: `parameters.settings.json`"] def test_files_exist_pass(self): """Lint check should pass if all files are there""" - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert results["failed"] == [] def test_files_exist_pass_conditional_nfschema(self): @@ -62,9 +60,58 @@ def test_files_exist_pass_conditional_nfschema(self): with open(Path(self.new_pipeline, "nextflow.config"), "w") as f: f.write(config) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.schema"] = "nf-core" - results = lint_obj.files_exist() + assert self.lint_obj._load() + self.lint_obj.nf_config["manifest.schema"] = "nf-core" + results = self.lint_obj.files_exist() assert results["failed"] == [] assert results["ignored"] == [] + + def test_files_exists_pass_nf_core_yml_config(self): + """Check if linting passes with a valid nf-core.yml config""" + valid_yaml = """ + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + """ + yaml = YAML() + nf_core_yml_path = Path(self.new_pipeline, ".nf-core.yml") + nf_core_yml = yaml.load(nf_core_yml_path) + + nf_core_yml["lint"] = yaml.load(valid_yaml) + yaml.dump(nf_core_yml, nf_core_yml_path) + + 
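For orientation, since the assertions that follow check specific message strings: the expectation is that any path listed under lint.files_exist in .nf-core.yml is reported as ignored rather than failed. A minimal runnable sketch of that filtering, with hypothetical names (lint_config, required_files) - the shipped files_exist check may differ in detail:

    from pathlib import Path

    lint_config = {"files_exist": [".github/CONTRIBUTING.md", "CITATIONS.md"]}  # as written to .nf-core.yml above
    required_files = [".github/CONTRIBUTING.md", "CITATIONS.md", "CHANGELOG.md"]
    pipeline_dir = Path(".")
    failed, ignored = [], []

    for f in required_files:
        if f in lint_config.get("files_exist", []):
            # Listed files are skipped and reported, matching the asserts below
            ignored.append(f"File is ignored: `{f}`")
        elif not (pipeline_dir / f).exists():
            failed.append(f"File not found: `{f}`")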
self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + assert self.lint_obj._load() + + results = self.lint_obj.files_exist() + assert results["failed"] == [] + assert "File is ignored: `.github/CONTRIBUTING.md`" in results["ignored"] + assert "File is ignored: `CITATIONS.md`" in results["ignored"] + + def test_files_exists_fail_nf_core_yml_config(self): + """Check if linting fails with a valid nf-core.yml config""" + valid_yaml = """ + files_exist: + - CITATIONS.md + """ + + # remove CITATIONS.md + Path(self.new_pipeline, "CITATIONS.md").unlink() + assert self.lint_obj._load() + # test first if linting fails correctly + results = self.lint_obj.files_exist() + assert "File not found: `CITATIONS.md`" in results["failed"] + + yaml = YAML() + nf_core_yml_path = Path(self.new_pipeline, ".nf-core.yml") + nf_core_yml = yaml.load(nf_core_yml_path) + + nf_core_yml["lint"] = yaml.load(valid_yaml) + yaml.dump(nf_core_yml, nf_core_yml_path) + + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + assert self.lint_obj._load() + + results = self.lint_obj.files_exist() + assert results["failed"] == [] + assert "File is ignored: `CITATIONS.md`" in results["ignored"] diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py index 3cc9355452..f8c3c1f31f 100644 --- a/tests/pipelines/lint/test_nextflow_config.py +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -6,7 +6,6 @@ import nf_core.pipelines.create.create import nf_core.pipelines.lint -from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -30,7 +29,6 @@ def test_default_values_match(self): result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 assert len(result["warned"]) == 0 - assert "Config default value correct: params.max_cpus" in str(result["passed"]) assert "Config default value correct: params.validate_params" in str(result["passed"]) def test_nextflow_config_bad_name_fail(self): @@ -71,18 +69,18 @@ def test_nextflow_config_missing_test_profile_failed(self): def test_default_values_fail(self): """Test linting fails if the default values in nextflow.config do not match the ones defined in the nextflow_schema.json.""" - # Change the default value of max_cpus in nextflow.config + # Change the default value of max_multiqc_email_size in nextflow.config nf_conf_file = Path(self.new_pipeline) / "nextflow.config" with open(nf_conf_file) as f: content = f.read() - fail_content = re.sub(r"\bmax_cpus\s*=\s*16\b", "max_cpus = 0", content) + fail_content = re.sub(r"\bmax_multiqc_email_size\s*=\s*'25.MB'", "max_multiqc_email_size = '0'", content) with open(nf_conf_file, "w") as f: f.write(fail_content) - # Change the default value of max_memory in nextflow_schema.json + # Change the default value of custom_config_version in nextflow_schema.json nf_schema_file = Path(self.new_pipeline) / "nextflow_schema.json" with open(nf_schema_file) as f: content = f.read() - fail_content = re.sub(r'"default": "128.GB"', '"default": "18.GB"', content) + fail_content = re.sub(r'"default": "master"', '"default": "main"', content) with open(nf_schema_file, "w") as f: f.write(fail_content) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) @@ -90,11 +88,11 @@ def test_default_values_fail(self): result = lint_obj.nextflow_config() assert len(result["failed"]) == 2 assert ( - "Config default value incorrect: `params.max_cpus` is set as `16` in `nextflow_schema.json` but is `0` in `nextflow.config`." 
+ "Config default value incorrect: `params.max_multiqc_email_size` is set as `25.MB` in `nextflow_schema.json` but is `0` in `nextflow.config`." in result["failed"] ) assert ( - "Config default value incorrect: `params.max_memory` is set as `18.GB` in `nextflow_schema.json` but is `128.GB` in `nextflow.config`." + "Config default value incorrect: `params.custom_config_version` is set as `main` in `nextflow_schema.json` but is `master` in `nextflow.config`." in result["failed"] ) @@ -103,14 +101,14 @@ def test_catch_params_assignment_in_main_nf(self): # Add parameter assignment in main.nf main_nf_file = Path(self.new_pipeline) / "main.nf" with open(main_nf_file, "a") as f: - f.write("params.max_time = 42") + f.write("params.custom_config_base = 'test'") lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() - assert len(result["failed"]) == 1 + assert len(result["failed"]) == 2 assert ( - result["failed"][0] - == "Config default value incorrect: `params.max_time` is set as `240.h` in `nextflow_schema.json` but is `null` in `nextflow.config`." + result["failed"][1] + == "Config default value incorrect: `params.custom_config_base` is set as `https://raw.githubusercontent.com/nf-core/configs/master` in `nextflow_schema.json` but is `null` in `nextflow.config`." ) def test_allow_params_reference_in_main_nf(self): @@ -118,7 +116,7 @@ def test_allow_params_reference_in_main_nf(self): # Add parameter reference in main.nf main_nf_file = Path(self.new_pipeline) / "main.nf" with open(main_nf_file, "a") as f: - f.write("params.max_time == 42") + f.write("params.custom_config_version == 'main'") lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() result = lint_obj.nextflow_config() @@ -126,22 +124,30 @@ def test_allow_params_reference_in_main_nf(self): def test_default_values_ignored(self): """Test ignoring linting of default values.""" - # Add max_cpus to the ignore list + valid_yaml = """ + nextflow_config: + - manifest.name + - config_defaults: + - params.custom_config_version + """ + # Add custom_config_version to the ignore list nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" - nf_core_yml = NFCoreYamlConfig( - repository_type="pipeline", lint={"nextflow_config": [{"config_defaults": ["params.max_cpus"]}]} - ) + + with open(nf_core_yml_path) as f: + nf_core_yml = yaml.safe_load(f) + nf_core_yml["lint"] = yaml.safe_load(valid_yaml) with open(nf_core_yml_path, "w") as f: - yaml.dump(nf_core_yml.model_dump(), f) + yaml.dump(nf_core_yml, f) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() lint_obj._load_lint_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 - assert len(result["ignored"]) == 1 - assert "Config default value correct: params.max_cpu" not in str(result["passed"]) - assert "Config default ignored: params.max_cpus" in str(result["ignored"]) + assert len(result["ignored"]) == 2 + assert "Config default value correct: params.custom_config_version" not in str(result["passed"]) + assert "Config default ignored: params.custom_config_version" in str(result["ignored"]) + assert "Config variable ignored: `manifest.name`" in str(result["ignored"]) def test_default_values_float(self): """Test comparing two float values.""" @@ -150,7 +156,9 @@ def test_default_values_float(self): with open(nf_conf_file) as f: content = f.read() fail_content = re.sub( - r"validate_params\s*=\s*true", 
"params.validate_params = true\ndummy = 0.000000001", content + r"validate_params\s*=\s*true", + "params.validate_params = true\ndummy = 0.000000001", + content, ) with open(nf_conf_file, "w") as f: f.write(fail_content) @@ -180,7 +188,9 @@ def test_default_values_float_fail(self): with open(nf_conf_file) as f: content = f.read() fail_content = re.sub( - r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content + r"validate_params\s*=\s*true", + "params.validate_params = true\ndummy = 0.000000001", + content, ) with open(nf_conf_file, "w") as f: f.write(fail_content) diff --git a/tests/pipelines/lint/test_nfcore_yml.py b/tests/pipelines/lint/test_nfcore_yml.py index 955c00da81..2ac36ffe0c 100644 --- a/tests/pipelines/lint/test_nfcore_yml.py +++ b/tests/pipelines/lint/test_nfcore_yml.py @@ -1,8 +1,9 @@ -import re from pathlib import Path -import nf_core.pipelines.create +from ruamel.yaml import YAML + import nf_core.pipelines.lint +from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -11,11 +12,14 @@ class TestLintNfCoreYml(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() - self.nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + self.nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + self.yaml = YAML() + self.nf_core_yml: NFCoreYamlConfig = self.yaml.load(self.nf_core_yml_path) + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) def test_nfcore_yml_pass(self): """Lint test: nfcore_yml - PASS""" - self.lint_obj._load() + assert self.lint_obj._load() results = self.lint_obj.nfcore_yml() assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"]) @@ -27,31 +31,95 @@ def test_nfcore_yml_pass(self): def test_nfcore_yml_fail_repo_type(self): """Lint test: nfcore_yml - FAIL - repository type not set""" - with open(self.nf_core_yml) as fh: - content = fh.read() - new_content = content.replace("repository_type: pipeline", "repository_type: foo") - with open(self.nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() - assert "Repository type in `.nf-core.yml` is not valid." in str(results["failed"]) - assert len(results.get("warned", [])) == 0 - assert len(results.get("passed", [])) >= 0 - assert len(results.get("ignored", [])) == 0 + self.nf_core_yml["repository_type"] = "foo" + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + with self.assertRaises(AssertionError): + self.lint_obj._load() def test_nfcore_yml_fail_nfcore_version(self): """Lint test: nfcore_yml - FAIL - nf-core version not set""" - with open(self.nf_core_yml) as fh: - content = fh.read() - new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) - with open(self.nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() + self.nf_core_yml["nf_core_version"] = "foo" + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() assert "nf-core version in `.nf-core.yml` is not set to the latest version." 
in str(results["warned"]) assert len(results.get("failed", [])) == 0 assert len(results.get("passed", [])) >= 0 assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_nested_lint_config(self) -> None: + """Lint test: nfcore_yml with nested lint config - PASS""" + valid_yaml = """ + lint: + files_unchanged: + - .github/workflows/branch.yml + # modules_config: False + modules_config: + - fastqc + # merge_markers: False + merge_markers: + - docs/my_pdf.pdf + # nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + # template_strings: False + template_strings: + - docs/my_pdf.pdf + """ + self.nf_core_yml["lint"] = self.yaml.load(valid_yaml) + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + assert len(results.get("failed", [])) == 0 + assert len(results.get("warned", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_nested_lint_config_bool(self) -> None: + """Lint test: nfcore_yml with nested lint config - PASS""" + valid_yaml = """ + lint: + files_unchanged: + - .github/workflows/branch.yml + modules_config: False + # modules_config: + # - fastqc + merge_markers: False + # merge_markers: + # - docs/my_pdf.pdf + # nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + template_strings: False + # template_strings: + # - docs/my_pdf.pdf + """ + self.nf_core_yml["lint"] = self.yaml.load(valid_yaml) + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + assert len(results.get("failed", [])) == 0 + assert len(results.get("warned", [])) == 0 + assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/test_template_strings.py b/tests/pipelines/lint/test_template_strings.py index 406ba63e0c..37b7604806 100644 --- a/tests/pipelines/lint/test_template_strings.py +++ b/tests/pipelines/lint/test_template_strings.py @@ -1,6 +1,8 @@ import subprocess from pathlib import Path +import yaml + import nf_core.pipelines.create import nf_core.pipelines.lint @@ -11,6 +13,9 @@ class TestLintTemplateStrings(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() + self.nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + with open(self.nf_core_yml_path) as f: + self.nf_core_yml = yaml.safe_load(f) def test_template_strings(self): """Tests finding a template string in a file fails linting.""" @@ -28,9 +33,12 @@ def test_template_strings(self): def test_template_strings_ignored(self): """Tests ignoring template_strings""" # Ignore template_strings test - nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings: False") + valid_yaml = """ + template_strings: false + """ + self.nf_core_yml["lint"] = yaml.safe_load(valid_yaml) + with open(self.nf_core_yml_path, "w") as f: + yaml.safe_dump(self.nf_core_yml, f) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) 
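A convention that both this test and the nested lint config tests above depend on: a check's entry under lint: in .nf-core.yml may be either a boolean (False disables the check entirely) or a list of paths (only those files are ignored). Sketched with a hypothetical lint_config dict, not the shipped nf-core logic:

    lint_config = {"template_strings": False}  # or: {"template_strings": ["docs/test.txt"]}

    cfg = lint_config.get("template_strings")
    if cfg is False:
        skip_check, ignored_paths = True, []  # check switched off outright
    else:
        skip_check = False
        ignored_paths = cfg if isinstance(cfg, list) else []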
lint_obj._load() lint_obj._lint_pipeline() @@ -43,13 +51,21 @@ def test_template_strings_ignore_file(self): txt_file = Path(self.new_pipeline) / "docs" / "test.txt" with open(txt_file, "w") as f: f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=self.new_pipeline) + # Ignore template_strings test - nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt") + valid_yaml = """ + template_strings: + - docs/test.txt + """ + self.nf_core_yml["lint"] = yaml.safe_load(valid_yaml) + with open(self.nf_core_yml_path, "w") as f: + yaml.safe_dump(self.nf_core_yml, f) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj._load() result = lint_obj.template_strings() + assert len(result["failed"]) == 0 assert len(result["ignored"]) == 1 diff --git a/tests/pipelines/test_bump_version.py b/tests/pipelines/test_bump_version.py index 709e82427d..8af5c0e4d1 100644 --- a/tests/pipelines/test_bump_version.py +++ b/tests/pipelines/test_bump_version.py @@ -13,12 +13,25 @@ def test_bump_pipeline_version(self): """Test that making a release with the working example files works""" # Bump the version number - nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1") + nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1.0") new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir) # Check nextflow.config new_pipeline_obj.load_pipeline_config() - assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1" + assert new_pipeline_obj.nf_config["manifest.version"].strip("'\"") == "1.1.0" + + # Check multiqc_config.yml + with open(new_pipeline_obj._fp("assets/multiqc_config.yml")) as fh: + multiqc_config = yaml.safe_load(fh) + + assert "report_comment" in multiqc_config + assert "/releases/tag/1.1.0" in multiqc_config["report_comment"] + + # Check .nf-core.yml + with open(new_pipeline_obj._fp(".nf-core.yml")) as fh: + nf_core_yml = yaml.safe_load(fh) + if nf_core_yml["template"]: + assert nf_core_yml["template"]["version"] == "1.1.0" def test_dev_bump_pipeline_version(self): """Test that making a release works with a dev name and a leading v""" @@ -33,7 +46,7 @@ def test_dev_bump_pipeline_version(self): def test_bump_nextflow_version(self): # Bump the version number to a specific version, preferably one # we're not already on - version = "22.04.3" + version = "25.04.2" nf_core.pipelines.bump_version.bump_nextflow_version(self.pipeline_obj, version) new_pipeline_obj = nf_core.utils.Pipeline(self.pipeline_dir) new_pipeline_obj._load() diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index a898d37b70..d1e2c41a68 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -257,6 +257,141 @@ def test_find_container_images_modules(self, tmp_path, mock_fetch_wf_config): not in download_obj.containers ) + # mock_seqera_container_oras.nf + assert "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6" in download_obj.containers + assert "community.wave.seqera.io/library/umi-transfer:1.0.0--d30e8812ea280fa1" not in download_obj.containers + + # mock_seqera_container_oras_mulled.nf + assert ( + "oras://community.wave.seqera.io/library/umi-transfer_umicollapse:796a995ff53da9e3" + in download_obj.containers + ) + assert ( + "community.wave.seqera.io/library/umi-transfer_umicollapse:3298d4f1b49e33bd" not in 
download_obj.containers
+        )
+
+        # mock_seqera_container_http.nf
+        assert (
+            "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data"
+            in download_obj.containers
+        )
+
+        # TODO: This URI should actually NOT be in there, but prioritize_direct_download() cannot handle this case.
+        #
+        # It works purely by comparing strings, so it can establish the equivalence of 'https://depot.galaxyproject.org/singularity/umi_tools:1.1.5--py39hf95cd2a_0'
+        # and 'biocontainers/umi_tools:1.1.5--py39hf95cd2a_0' because they share the substring 'umi_tools:1.1.5--py39hf95cd2a_0', but it has no means to establish that
+        # 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data' and
+        # 'community.wave.seqera.io/library/coreutils:9.5--ae99c88a9b28c264' refer to the same container. It would need to query a Seqera API for that.
+
+        assert "community.wave.seqera.io/library/coreutils:9.5--ae99c88a9b28c264" in download_obj.containers
+
+    #
+    # Test for 'prioritize_direct_download'
+    #
+    @with_temporary_folder
+    def test_prioritize_direct_download(self, tmp_path):
+        download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path)
+
+        # Tests deduplication and https:// priority, as well as the Seqera Containers exception.
+
+        test_container = [
+            "https://depot.galaxyproject.org/singularity/ubuntu:22.04",
+            "nf-core/ubuntu:22.04",
+            "biocontainers/umi-transfer:1.5.0--h715e4b3_0",
+            "https://depot.galaxyproject.org/singularity/umi-transfer:1.5.0--h715e4b3_0",
+            "biocontainers/umi-transfer:1.5.0--h715e4b3_0",
+            "quay.io/nf-core/sortmerna:4.3.7--6502243397c065ba",
+            "nf-core/sortmerna:4.3.7--6502243397c065ba",
+            "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_1",
+            "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_0",
+            "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--h9ee0642_1",
+            "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data",
+            "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data",
+            "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data",
+        ]
+
+        result = download_obj.prioritize_direct_download(test_container)
+
+        # Verify that the priority works for regular https downloads (https encountered first)
+        assert "https://depot.galaxyproject.org/singularity/ubuntu:22.04" in result
+        assert "nf-core/ubuntu:22.04" not in result
+
+        # Verify that the priority works for regular https downloads (https encountered second)
+        assert "biocontainers/umi-transfer:1.5.0--h715e4b3_0" not in result
+        assert "https://depot.galaxyproject.org/singularity/umi-transfer:1.5.0--h715e4b3_0" in result
+
+        # Verify that the priority works for images with and without explicit registry
+        # No priority here, though - the first is retained.
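Before those registry-variant assertions, it is worth spelling out the heuristic the TODO above describes. A rough sketch of the idea - prefer an https:// URI whenever a registry-style name ends in the same 'image:tag' suffix - might look like the following (hypothetical prefer_https helper; the real prioritize_direct_download is more involved):

    def prefer_https(containers: list) -> list:
        # https:// URIs win over registry-style names that share their image:tag suffix.
        https = [c for c in containers if c.startswith("https://")]

        def shadowed(name: str) -> bool:
            suffix = name.split("/")[-1]  # e.g. 'umi_tools:1.1.5--py39hf95cd2a_0'
            return any(h.endswith("/" + suffix) for h in https)

        result, seen = [], set()
        for c in containers:
            if c in seen or (not c.startswith("https://") and shadowed(c)):
                continue  # drop exact duplicates and shadowed names
            seen.add(c)
            result.append(c)
        return result

Note that this sketch reproduces the very limitation the TODO complains about: a Seqera blob URL ends in '/data', so it never shadows the corresponding community.wave.seqera.io name, and both survive.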
+ assert "nf-core/sortmerna:4.3.7--6502243397c065ba" in result + assert "quay.io/nf-core/sortmerna:4.3.7--6502243397c065ba" not in result + + # Verify that different versions of the same tool and different build numbers are retained + assert "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_1" in result + assert "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_0" in result + assert "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--h9ee0642_1" in result + + # Verify that Seqera containers are not deduplicated... + assert ( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data" + in result + ) + assert ( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" + in result + ) + # ...but identical ones are. + assert ( + result.count( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" + ) + == 1 + ) + + # + # Test for 'reconcile_seqera_container_uris' + # + @with_temporary_folder + def test_reconcile_seqera_container_uris(self, tmp_path): + download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) + + prioritized_container = [ + "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6", + "oras://community.wave.seqera.io/library/sylph:0.6.1--b97274cdc1caa649", + ] + + test_container = [ + "https://depot.galaxyproject.org/singularity/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "community.wave.seqera.io/library/umi-transfer:1.5.0--73c1a6b65e5b0b81", + "community.wave.seqera.io/library/sylph:0.6.1--a21713a57a65a373", + "biocontainers/sylph:0.6.1--b97274cdc1caa649", + ] + + # test that the test_container list is returned as it is, if no prioritized_containers are specified + result_empty = download_obj.reconcile_seqera_container_uris([], test_container) + assert result_empty == test_container + + result = download_obj.reconcile_seqera_container_uris(prioritized_container, test_container) + + # Verify that unrelated images are retained + assert "https://depot.galaxyproject.org/singularity/ubuntu:22.04" in result + assert "nf-core/ubuntu:22.04" in result + + # Verify that the priority works for regular Seqera container (Native Singularity over Docker, but only for Seqera registry) + assert "oras://community.wave.seqera.io/library/sylph:0.6.1--b97274cdc1caa649" in result + assert "community.wave.seqera.io/library/sylph:0.6.1--a21713a57a65a373" not in result + assert "biocontainers/sylph:0.6.1--b97274cdc1caa649" in result + + # Verify that version strings are respected: Version 1.0.0 does not replace version 1.5.0 + assert "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6" in result + assert "community.wave.seqera.io/library/umi-transfer:1.5.0--73c1a6b65e5b0b81" in result + + # assert that the deduplication works + assert test_container.count("nf-core/ubuntu:22.04") == 3 + assert result.count("nf-core/ubuntu:22.04") == 1 + # # Tests for 'singularity_pull_image' # @@ -287,11 +422,30 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "docker.io/bschiffthaler/sed", f"{tmp_dir}/sed.sif", None, "docker.io", mock_rich_progress ) + # Test successful pull with absolute oras:// URI + download_obj.singularity_pull_image( + 
"oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6", + f"{tmp_dir}/umi-transfer-oras.sif", + None, + "docker.io", + mock_rich_progress, + ) + + # try pulling Docker container image with oras:// + with pytest.raises(ContainerError.NoSingularityContainerError): + download_obj.singularity_pull_image( + "oras://ghcr.io/matthiaszepper/umi-transfer:dev", + f"{tmp_dir}/umi-transfer-oras_impostor.sif", + None, + "docker.io", + mock_rich_progress, + ) + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExistsError is raised before attempting to pull.) with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( "hello-world", - f"{tmp_dir}/hello-world_new.sif", + f"{tmp_dir}/break_the_registry_test.sif", None, "register-this-domain-to-break-the-test.io", mock_rich_progress, @@ -327,7 +481,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p with pytest.raises(ContainerError.InvalidTagError): download_obj.singularity_pull_image( "ewels/multiqc:go-rewrite", - f"{tmp_dir}/umi-transfer.sif", + f"{tmp_dir}/multiqc-go.sif", None, "ghcr.io", mock_rich_progress, @@ -376,10 +530,72 @@ def test_get_singularity_images(self, tmp_path, mock_fetch_wf_config): @mock.patch("os.symlink") @mock.patch("os.open") @mock.patch("os.close") - @mock.patch("re.sub") @mock.patch("os.path.basename") @mock.patch("os.path.dirname") def test_symlink_singularity_images( + self, + tmp_path, + mock_dirname, + mock_basename, + mock_close, + mock_open, + mock_symlink, + mock_makedirs, + ): + # Setup + mock_dirname.return_value = f"{tmp_path}/path/to" + mock_basename.return_value = "singularity-image.img" + mock_open.return_value = 12 # file descriptor + mock_close.return_value = 12 # file descriptor + + download_obj = DownloadWorkflow( + pipeline="dummy", + outdir=tmp_path, + container_library=( + "quay.io", + "community-cr-prod.seqera.io/docker/registry/v2", + "depot.galaxyproject.org/singularity", + ), + ) + + # Call the method + download_obj.symlink_singularity_images(f"{tmp_path}/path/to/singularity-image.img") + + # Check that os.makedirs was called with the correct arguments + mock_makedirs.assert_any_call(f"{tmp_path}/path/to", exist_ok=True) + + # Check that os.open was called with the correct arguments + mock_open.assert_any_call(f"{tmp_path}/path/to", os.O_RDONLY) + + # Check that os.symlink was called with the correct arguments + expected_calls = [ + mock.call( + "./singularity-image.img", + "./quay.io-singularity-image.img", + dir_fd=12, + ), + mock.call( + "./singularity-image.img", + "./community-cr-prod.seqera.io-docker-registry-v2-singularity-image.img", + dir_fd=12, + ), + mock.call( + "./singularity-image.img", + "./depot.galaxyproject.org-singularity-singularity-image.img", + dir_fd=12, + ), + ] + mock_symlink.assert_has_calls(expected_calls, any_order=True) + + @with_temporary_folder + @mock.patch("os.makedirs") + @mock.patch("os.symlink") + @mock.patch("os.open") + @mock.patch("os.close") + @mock.patch("re.sub") + @mock.patch("os.path.basename") + @mock.patch("os.path.dirname") + def test_symlink_singularity_images_registry( self, tmp_path, mock_dirname, @@ -400,23 +616,22 @@ def test_symlink_singularity_images( download_obj = DownloadWorkflow( pipeline="dummy", outdir=tmp_path, - container_library=("mirage-the-imaginative-registry.io", "quay.io"), + container_library=("quay.io", "community-cr-prod.seqera.io/docker/registry/v2"), ) - # Call the method + 
download_obj.registry_set = {"quay.io", "community-cr-prod.seqera.io/docker/registry/v2"} + + # Call the method with registry - should not happen, but preserve it then. download_obj.symlink_singularity_images(f"{tmp_path}/path/to/quay.io-singularity-image.img") print(mock_resub.call_args) # Check that os.makedirs was called with the correct arguments mock_makedirs.assert_any_call(f"{tmp_path}/path/to", exist_ok=True) - # Check that os.open was called with the correct arguments - mock_open.assert_called_once_with(f"{tmp_path}/path/to", os.O_RDONLY) - # Check that os.symlink was called with the correct arguments - mock_symlink.assert_any_call( + mock_symlink.assert_called_with( "./quay.io-singularity-image.img", - "./mirage-the-imaginative-registry.io-quay.io-singularity-image.img", + "./community-cr-prod.seqera.io-docker-registry-v2-singularity-image.img", dir_fd=12, ) # Check that there is no attempt to symlink to itself (test parameters would result in that behavior if not checked in the function) @@ -425,6 +640,10 @@ def test_symlink_singularity_images( not in mock_symlink.call_args_list ) + # Normally it would be called for each registry, but since quay.io is part of the name, it + # will only be called once, as no symlink to itself must be created. + mock_open.assert_called_once_with(f"{tmp_path}/path/to", os.O_RDONLY) + # # Test for gather_registries' # @@ -446,10 +665,16 @@ def test_gather_registries(self, tmp_path, mock_fetch_wf_config): download_obj.gather_registries(tmp_path) assert download_obj.registry_set assert isinstance(download_obj.registry_set, set) - assert len(download_obj.registry_set) == 6 + assert len(download_obj.registry_set) == 8 assert "quay.io" in download_obj.registry_set # default registry, if no container library is provided. 
- assert "depot.galaxyproject.org" in download_obj.registry_set # default registry, often hardcoded in modules + assert ( + "depot.galaxyproject.org/singularity" in download_obj.registry_set + ) # default registry, often hardcoded in modules + assert "community.wave.seqera.io/library" in download_obj.registry_set # Seqera containers Docker + assert ( + "community-cr-prod.seqera.io/docker/registry/v2" in download_obj.registry_set + ) # Seqera containers Singularity https:// download assert "apptainer-registry.io" in download_obj.registry_set assert "docker.io" in download_obj.registry_set assert "podman-registry.io" in download_obj.registry_set @@ -483,7 +708,14 @@ def test_singularity_image_filenames(self, tmp_path): download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) download_obj.outdir = tmp_path download_obj.container_cache_utilisation = "amend" - download_obj.registry_set = {"docker.io", "quay.io", "depot.galaxyproject.org"} + + download_obj.registry_set = { + "docker.io", + "quay.io", + "depot.galaxyproject.org/singularity", + "community.wave.seqera.io/library", + "community-cr-prod.seqera.io/docker/registry/v2", + } ## Test phase I: Container not yet cached, should be amended to cache # out_path: str, Path to cache @@ -501,11 +733,13 @@ def test_singularity_image_filenames(self, tmp_path): self.assertTrue(all((isinstance(element, str), element is None) for element in result)) # assert that the correct out_path is returned that points to the cache - assert result[0].endswith("/cachedir/singularity-bbmap-38.93--he522d1c_0.img") + assert result[0].endswith("/cachedir/bbmap-38.93--he522d1c_0.img") ## Test phase II: Test various container names # out_path: str, Path to cache # cache_path: None + + # Test --- mulled containers # result = download_obj.singularity_image_filenames( "quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:59cdd445419f14abac76b31dd0d71217994cbcc9-0" ) @@ -513,10 +747,33 @@ def test_singularity_image_filenames(self, tmp_path): "/cachedir/biocontainers-mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2-59cdd445419f14abac76b31dd0d71217994cbcc9-0.img" ) + # Test --- Docker containers without registry # result = download_obj.singularity_image_filenames("nf-core/ubuntu:20.04") assert result[0].endswith("/cachedir/nf-core-ubuntu-20.04.img") - ## Test phase III: Container wil lbe cached but also copied to out_path + # Test --- Docker container with explicit registry -> should be trimmed # + result = download_obj.singularity_image_filenames("docker.io/nf-core/ubuntu:20.04") + assert result[0].endswith("/cachedir/nf-core-ubuntu-20.04.img") + + # Test --- Docker container with explicit registry not in registry set -> can't be trimmed + result = download_obj.singularity_image_filenames("mirage-the-imaginative-registry.io/nf-core/ubuntu:20.04") + assert result[0].endswith("/cachedir/mirage-the-imaginative-registry.io-nf-core-ubuntu-20.04.img") + + # Test --- Seqera Docker containers: Trimmed, because it is hard-coded in the registry set. + result = download_obj.singularity_image_filenames( + "community.wave.seqera.io/library/coreutils:9.5--ae99c88a9b28c264" + ) + assert result[0].endswith("/cachedir/coreutils-9.5--ae99c88a9b28c264.img") + + # Test --- Seqera Singularity containers: Trimmed, because it is hard-coded in the registry set. 
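+        # (The registry prefix is stripped and the remaining URL path is flattened into a
+        # hyphen-separated .img filename, as the assertion below shows.)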
+ result = download_obj.singularity_image_filenames( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" + ) + assert result[0].endswith( + "cachedir/blobs-sha256-c2-c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975-data.img" + ) + + ## Test phase III: Container will be cached but also copied to out_path # out_path: str, Path to cache # cache_path: str, Path to cache download_obj.container_cache_utilisation = "copy" @@ -525,8 +782,8 @@ def test_singularity_image_filenames(self, tmp_path): ) self.assertTrue(all(isinstance(element, str) for element in result)) - assert result[0].endswith("/singularity-images/singularity-bbmap-38.93--he522d1c_0.img") - assert result[1].endswith("/cachedir/singularity-bbmap-38.93--he522d1c_0.img") + assert result[0].endswith("/singularity-images/bbmap-38.93--he522d1c_0.img") + assert result[1].endswith("/cachedir/bbmap-38.93--he522d1c_0.img") ## Test phase IV: Expect an error if no NXF_SINGULARITY_CACHEDIR is defined os.environ["NXF_SINGULARITY_CACHEDIR"] = "" diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py index d63e7b6dc5..ed23872f66 100644 --- a/tests/pipelines/test_launch.py +++ b/tests/pipelines/test_launch.py @@ -101,7 +101,12 @@ def test_ob_to_questionary_string(self): "default": "data/*{1,2}.fastq.gz", } result = self.launcher.single_param_to_questionary("input", sc_obj) - assert result == {"type": "input", "name": "input", "message": "", "default": "data/*{1,2}.fastq.gz"} + assert result == { + "type": "input", + "name": "input", + "message": "", + "default": "data/*{1,2}.fastq.gz", + } @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}]) def test_prompt_web_gui_true(self, mock_prompt): @@ -123,7 +128,8 @@ def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): assert exc_info.value.args[0].startswith("Web launch response not recognised:") @mock.patch( - "nf_core.utils.poll_nfcore_web_api", side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}] + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}], ) @mock.patch("webbrowser.open") @mock.patch("nf_core.utils.wait_cli_function") @@ -133,7 +139,10 @@ def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wa self.launcher.merge_nxf_flag_schema() assert self.launcher.launch_web_gui() is None - @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "error", "message": "foo"}], + ) def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status error""" with pytest.raises(AssertionError) as exc_info: @@ -147,12 +156,18 @@ def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): self.launcher.get_web_launch_response() assert exc_info.value.args[0].startswith("Web launch GUI returned unexpected status (foo): ") - @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "waiting_for_user"}], + ) def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status waiting_for_user""" assert self.launcher.get_web_launch_response() is False - 
@mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "launch_params_complete"}], + ) def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - complete, but missing keys""" with pytest.raises(AssertionError) as exc_info: @@ -185,11 +200,9 @@ def test_sanitise_web_response(self): self.launcher.get_pipeline_schema() self.launcher.nxf_flags["-name"] = "" self.launcher.schema_obj.input_params["igenomes_ignore"] = "true" - self.launcher.schema_obj.input_params["max_cpus"] = "12" self.launcher.sanitise_web_response() assert "-name" not in self.launcher.nxf_flags assert self.launcher.schema_obj.input_params["igenomes_ignore"] is True - assert self.launcher.schema_obj.input_params["max_cpus"] == 12 def test_ob_to_questionary_bool(self): """Check converting a python dict to a pyenquirer format - booleans""" @@ -262,7 +275,10 @@ def test_ob_to_questionary_enum(self): def test_ob_to_questionary_pattern(self): """Check converting a python dict to a questionary format - with pattern""" - sc_obj = {"type": "string", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$"} + sc_obj = { + "type": "string", + "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$", + } result = self.launcher.single_param_to_questionary("email", sc_obj) assert result["type"] == "input" assert result["validate"]("test@email.com") is True @@ -282,7 +298,7 @@ def test_strip_default_params(self): assert self.launcher.schema_obj.input_params == {"input": "custom_input"} def test_build_command_empty(self): - """Test the functionality to build a nextflow command - nothing customsied""" + """Test the functionality to build a nextflow command - nothing customised""" self.launcher.get_pipeline_schema() self.launcher.merge_nxf_flag_schema() self.launcher.build_command() diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 9ca29d249f..ca7353d50d 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -48,7 +48,8 @@ def test_init_pipeline_lint(self): def test_load_lint_config_not_found(self): """Try to load a linting config file that doesn't exist""" assert self.lint_obj._load_lint_config() - assert self.lint_obj.lint_config == {} + assert self.lint_obj.lint_config is not None + assert self.lint_obj.lint_config.model_dump(exclude_none=True) == {} def test_load_lint_config_ignore_all_tests(self): """Try to load a linting config file that ignores all tests""" @@ -64,7 +65,8 @@ def test_load_lint_config_ignore_all_tests(self): # Load the new lint config file and check lint_obj._load_lint_config() - assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + assert lint_obj.lint_config is not None + assert sorted(list(lint_obj.lint_config.model_dump(exclude_none=True))) == sorted(lint_obj.lint_tests) # Try running linting and make sure that all tests are ignored lint_obj._lint_pipeline() diff --git a/tests/pipelines/test_params_file.py b/tests/pipelines/test_params_file.py index 22a6182acd..0450d3f99d 100644 --- a/tests/pipelines/test_params_file.py +++ b/tests/pipelines/test_params_file.py @@ -1,79 +1,67 @@ import json -import os -import shutil -import tempfile from pathlib import Path -import nf_core.pipelines.create.create -import nf_core.pipelines.schema from nf_core.pipelines.params_file import ParamsFileBuilder +from 
..test_pipelines import TestPipelines -class TestParamsFileBuilder: + +class TestParamsFileBuilder(TestPipelines): """Class for schema tests""" - @classmethod - def setup_class(cls): + def setUp(self): """Create a new PipelineSchema object""" - cls.schema_obj = nf_core.pipelines.schema.PipelineSchema() - cls.root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - - # Create a test pipeline in temp directory - cls.tmp_dir = tempfile.mkdtemp() - cls.template_dir = Path(cls.tmp_dir, "wf") - create_obj = nf_core.pipelines.create.create.PipelineCreate( - "testpipeline", "a description", "Me", outdir=cls.template_dir, no_git=True - ) - create_obj.init_pipeline() - - cls.template_schema = Path(cls.template_dir, "nextflow_schema.json") - cls.params_template_builder = ParamsFileBuilder(cls.template_dir) - cls.invalid_template_schema = Path(cls.template_dir, "nextflow_schema_invalid.json") - - # Remove the allOf section to make the schema invalid - with open(cls.template_schema) as fh: - o = json.load(fh) - del o["allOf"] - - with open(cls.invalid_template_schema, "w") as fh: - json.dump(o, fh) - - @classmethod - def teardown_class(cls): - if Path(cls.tmp_dir).exists(): - shutil.rmtree(cls.tmp_dir) + super().setUp() + + self.template_schema = Path(self.pipeline_dir, "nextflow_schema.json") + self.params_template_builder = ParamsFileBuilder(self.pipeline_dir) + self.outfile = Path(self.pipeline_dir, "params-file.yml") def test_build_template(self): - outfile = Path(self.tmp_dir, "params-file.yml") - self.params_template_builder.write_params_file(str(outfile)) + self.params_template_builder.write_params_file(self.outfile) - assert outfile.exists() + assert self.outfile.exists() - with open(outfile) as fh: + with open(self.outfile) as fh: out = fh.read() assert "nf-core/testpipeline" in out - def test_build_template_invalid_schema(self, caplog): + def test_build_template_invalid_schema(self): """Build a schema from a template""" - outfile = Path(self.tmp_dir, "params-file-invalid.yml") - builder = ParamsFileBuilder(self.invalid_template_schema) - res = builder.write_params_file(str(outfile)) + schema = {} + with open(self.template_schema) as fh: + schema = json.load(fh) + del schema["allOf"] + + with open(self.template_schema, "w") as fh: + json.dump(schema, fh) + + builder = ParamsFileBuilder(self.template_schema) + res = builder.write_params_file(self.outfile) assert res is False - assert "Pipeline schema file is invalid" in caplog.text + assert "Pipeline schema file is invalid" in self.caplog.text - def test_build_template_file_exists(self, caplog): + def test_build_template_file_exists(self): """Build a schema from a template""" # Creates a new empty file - outfile = Path(self.tmp_dir) / "params-file.yml" - with open(outfile, "w"): - pass + self.outfile.touch() - res = self.params_template_builder.write_params_file(outfile) + res = self.params_template_builder.write_params_file(self.outfile) assert res is False - assert f"File '{outfile}' exists!" in caplog.text + assert f"File '{self.outfile}' exists!" 
in self.caplog.text
+
+        self.outfile.unlink()
 
-        outfile.unlink()
+    def test_build_template_content(self):
+        """Test that the content of the params file is correct"""
+        self.params_template_builder.write_params_file(self.outfile)
+
+        with open(self.outfile) as fh:
+            out = fh.read()
+
+        assert "nf-core/testpipeline" in out
+        assert "# input = null" in out
diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py
new file mode 100644
index 0000000000..01a77ecd76
--- /dev/null
+++ b/tests/pipelines/test_rocrate.py
@@ -0,0 +1,127 @@
+"""Test the nf-core pipelines rocrate command"""
+
+import shutil
+import tempfile
+from pathlib import Path
+
+import git
+import rocrate.rocrate
+from git import Repo
+
+import nf_core.pipelines.create
+import nf_core.pipelines.create.create
+import nf_core.pipelines.rocrate
+import nf_core.utils
+
+from ..test_pipelines import TestPipelines
+
+
+class TestROCrate(TestPipelines):
+    """Class for ROCrate tests"""
+
+    def setUp(self) -> None:
+        super().setUp()
+        # add fake metro map
+        Path(self.pipeline_dir, "docs", "images", "nf-core-testpipeline_metro_map.png").touch()
+        # commit the changes
+        repo = Repo(self.pipeline_dir)
+        repo.git.add(A=True)
+        repo.index.commit("Initial commit")
+        self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir)
+
+    def tearDown(self):
+        """Clean up temporary files and folders"""
+
+        if self.tmp_dir.exists():
+            shutil.rmtree(self.tmp_dir)
+
+    def test_rocrate_creation(self):
+        """Run the nf-core rocrate command"""
+
+        # Run the command
+        assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir)
+
+        # Check that the crate was created
+        self.assertTrue(Path(self.pipeline_dir, "ro-crate-metadata.json").exists())
+
+        # Check that the entries in the crate are correct
+        crate = rocrate.rocrate.ROCrate(self.pipeline_dir)
+        entities = crate.get_entities()
+
+        # Check if the correct entities are set:
+        for entity in entities:
+            entity_json = entity.as_jsonld()
+            if entity_json["@id"] == "./":
+                self.assertEqual(entity_json.get("name"), "nf-core/testpipeline")
+                self.assertEqual(entity_json["mainEntity"], {"@id": "main.nf"})
+            elif entity_json["@id"] == "#main.nf":
+                self.assertEqual(entity_json["programmingLanguage"], [{"@id": "#nextflow"}])
+                self.assertEqual(entity_json["image"], [{"@id": "nf-core-testpipeline_metro_map.png"}])
+            # assert there is a metro map
+            # elif entity_json["@id"] == "nf-core-testpipeline_metro_map.png": # FIXME waiting for https://github.com/ResearchObject/ro-crate-py/issues/174
+            #     self.assertEqual(entity_json["@type"], ["File", "ImageObject"])
+            # assert that author is set as a person
+            elif "name" in entity_json and entity_json["name"] == "Test McTestFace":
+                self.assertEqual(entity_json["@type"], "Person")
+                # check that it is set as author of the main entity
+                if crate.mainEntity is not None:
+                    self.assertEqual(crate.mainEntity["author"][0].id, entity_json["@id"])
+
+    def test_rocrate_creation_wrong_pipeline_dir(self):
+        """Run the nf-core rocrate command with a wrong pipeline directory"""
+
+        # Check that it raises a UserWarning
+        with self.assertRaises(UserWarning):
+            nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir / "bad_dir")
+
+        # assert that the crate was not created
+        self.assertFalse(Path(self.pipeline_dir / "bad_dir", "ro-crate-metadata.json").exists())
+
+    def test_rocrate_creation_with_wrong_version(self):
+        """Run the nf-core rocrate command with a wrong pipeline version"""
+
+        self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir, version="1.0.0")
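+        # "1.0.0" does not match the actual version of the freshly created test pipeline,
+        # so crate creation below is expected to abort.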
+
+        # Check that the creation fails with a SystemExit
+        with self.assertRaises(SystemExit):
+            assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir)
+
+    def test_rocrate_creation_without_git(self):
+        """Run the nf-core rocrate command without a git repository"""
+
+        self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir, version="1.0.0")
+        # remove git repo
+        shutil.rmtree(self.pipeline_dir / ".git")
+        # Check that the creation fails with a SystemExit
+        with self.assertRaises(SystemExit):
+            assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir)
+
+    def test_rocrate_creation_to_zip(self):
+        """Run the nf-core rocrate command with a zip output"""
+        assert self.rocrate_obj.create_rocrate(self.pipeline_dir, zip_path=self.pipeline_dir)
+        # Check that the crate was created
+        self.assertTrue(Path(self.pipeline_dir, "ro-crate.crate.zip").exists())
+
+    def test_rocrate_creation_for_fetchngs(self):
+        """Run the nf-core rocrate command with nf-core/fetchngs"""
+        tmp_dir = Path(tempfile.mkdtemp())
+        # git clone nf-core/fetchngs
+        git.Repo.clone_from("https://github.com/nf-core/fetchngs", tmp_dir / "fetchngs")
+        # Run the command
+        self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(tmp_dir / "fetchngs", version="1.12.0")
+        assert self.rocrate_obj.create_rocrate(tmp_dir / "fetchngs", self.pipeline_dir)
+
+        # Check that Sateesh Peri is mentioned in the creator field
+
+        crate = rocrate.rocrate.ROCrate(self.pipeline_dir)
+        entities = crate.get_entities()
+        for entity in entities:
+            entity_json = entity.as_jsonld()
+            if entity_json["@id"] == "#main.nf":
+                assert "https://orcid.org/0000-0002-9879-9070" in entity_json["creator"]
+
+        # Clean up
+        shutil.rmtree(tmp_dir)
diff --git a/tests/pipelines/test_schema.py b/tests/pipelines/test_schema.py
index 2abaf07bd2..ab543d8b90 100644
--- a/tests/pipelines/test_schema.py
+++ b/tests/pipelines/test_schema.py
@@ -49,7 +49,7 @@ def test_load_lint_schema(self):
         self.schema_obj.load_lint_schema()
 
     def test_load_lint_schema_nofile(self):
-        """Check that linting raises properly if a non-existant file is given"""
+        """Check that linting raises properly if a non-existent file is given"""
         with pytest.raises(RuntimeError):
             self.schema_obj.get_schema_path("fake_file")
diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py
index ffbe75510b..8bf8a3c4ec 100644
--- a/tests/pipelines/test_sync.py
+++ b/tests/pipelines/test_sync.py
@@ -56,6 +56,8 @@ def mocked_requests_get(url) -> MockResponse:
             for branch_no in range(3, 7)
         ]
         return MockResponse(response_data, 200, url)
+    if url == "https://nf-co.re/pipelines.json":
+        return MockResponse({"remote_workflows": [{"name": "testpipeline", "topics": ["test", "pipeline"]}]}, 200, url)
     return MockResponse([{"html_url": url}], 404, url)
 
@@ -398,3 +400,33 @@ def test_reset_target_dir_fake_branch(self):
         with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info:
             psync.reset_target_dir()
         assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`")
+
+    def test_sync_no_changes(self):
+        """Test pipeline sync when no changes are needed"""
+        with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch(
+            "requests.post", side_effect=mocked_requests_post
+        ) as mock_post:
+            psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir)
+
+            # Mock that no changes were made
+            psync.made_changes = False
+
+            # Run sync
+            psync.sync()
+
+            # Verify no PR was created
+            mock_post.assert_not_called()
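+
+    # sync() only attempts to open a PR (via requests.post) when made_changes is True;
+    # the next test exercises the missing-token failure of that code path.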
+
+    def test_sync_no_github_token(self):
+        """Test sync fails appropriately when GitHub token is missing"""
+        # Ensure GitHub token is not set
+        if "GITHUB_AUTH_TOKEN" in os.environ:
+            del os.environ["GITHUB_AUTH_TOKEN"]
+
+        psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir, make_pr=True)
+        psync.made_changes = True  # Force changes to trigger PR attempt
+
+        # Run sync and check for appropriate error
+        with self.assertRaises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info:
+            psync.sync()
+        self.assertIn("GITHUB_AUTH_TOKEN not set!", str(exc_info.exception))
diff --git a/tests/subworkflows/test_lint.py b/tests/subworkflows/test_lint.py
index 56574b865c..d94b55b3d3 100644
--- a/tests/subworkflows/test_lint.py
+++ b/tests/subworkflows/test_lint.py
@@ -63,6 +63,14 @@ def test_subworkflows_lint_multiple_remotes(self):
         assert len(subworkflow_lint.passed) > 0
         assert len(subworkflow_lint.warned) >= 0
 
+    def test_subworkflows_lint_update_meta_yml(self):
+        """Update the meta.yml of a subworkflow"""
+        subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules, fix=True)
+        subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow")
+        assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}"
+        assert len(subworkflow_lint.passed) > 0
+        assert len(subworkflow_lint.warned) >= 0
+
     def test_subworkflows_lint_snapshot_file(self):
         """Test linting a subworkflow with a snapshot file"""
         subworkflow_lint = nf_core.subworkflows.SubworkflowLint(directory=self.nfcore_modules)
diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py
new file mode 100644
index 0000000000..5bb6a6798e
--- /dev/null
+++ b/tests/subworkflows/test_patch.py
@@ -0,0 +1,307 @@
+import os
+import tempfile
+from pathlib import Path
+from unittest import mock
+
+import pytest
+
+import nf_core.components.components_command
+import nf_core.components.patch
+import nf_core.subworkflows
+
+from ..test_subworkflows import TestSubworkflows
+from ..utils import GITLAB_REPO, GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL
+
+OLD_SHA = "dbb12457e32d3da8eea7dc4ae096201fff4747c5"
+SUCCEED_SHA = "0a33e6a0d730ad22a0ec9f7f9a7540af6e943221"
+FAIL_SHA = "b6e5e8739de9a1a0c4f85267144e43dbaf8f1461"
+
+
+class TestSubworkflowsPatch(TestSubworkflows):
+    """
+    Test the 'nf-core subworkflows patch' command
+    """
+
+    def modify_main_nf(self, path):
+        """Modify a file to test patch creation"""
+        with open(path) as fh:
+            lines = fh.readlines()
+        # We want a patch file that looks something like:
+        # - ch_fasta // channel: [ fasta ]
+        for line_index in range(len(lines)):
+            if lines[line_index] == " ch_fasta // channel: [ fasta ]\n":
+                to_pop = line_index
+        lines.pop(to_pop)
+        with open(path, "w") as fh:
+            fh.writelines(lines)
+
+    def setup_patch(self, pipeline_dir, modify_subworkflow):
+        # Install the subworkflow bam_sort_stats_samtools
+        install_obj = nf_core.subworkflows.SubworkflowInstall(
+            pipeline_dir,
+            prompt=False,
+            force=False,
+            remote_url=GITLAB_URL,
+            branch=GITLAB_SUBWORKFLOWS_BRANCH,
+            sha=OLD_SHA,
+        )
+
+        # Install the subworkflow
+        install_obj.install("bam_sort_stats_samtools")
+
+        if modify_subworkflow:
+            # Modify the subworkflow
+            subworkflow_path = Path(pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+            self.modify_main_nf(subworkflow_path / "main.nf")
+
+    def test_create_patch_no_change(self):
+        """Test creating a patch when there is no change to the subworkflow"""
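+        # The subworkflow is installed at OLD_SHA and its files are left untouched,
+        # so patch creation should refuse to produce a diff.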
+        self.setup_patch(self.pipeline_dir, False)
+
+        # Try creating a patch file
+        patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH)
+        with pytest.raises(UserWarning):
+            patch_obj.patch("bam_sort_stats_samtools")
+
+        subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+
+        # Check that no patch file has been added to the directory
+        assert not (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
+
+    def test_create_patch_change(self):
+        """Test creating a patch when there is a change to the subworkflow"""
+        self.setup_patch(self.pipeline_dir, True)
+
+        # Try creating a patch file
+        patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH)
+        patch_obj.patch("bam_sort_stats_samtools")
+
+        subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+
+        # Check that a patch file with the correct name has been created
+        assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
+
+        # Check that the correct lines are in the patch file
+        with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh:
+            patch_lines = fh.readlines()
+        print(patch_lines)
+        subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir)
+        assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf"
+        assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines
+        assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines
+
+    def test_create_patch_try_apply_successful(self):
+        """Test creating a patch file and applying it to a new version of the files"""
+        self.setup_patch(self.pipeline_dir, True)
+        subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+        subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath)
+
+        # Try creating a patch file
+        patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH)
+        patch_obj.patch("bam_sort_stats_samtools")
+
+        # Check that a patch file with the correct name has been created
+        assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
+
+        update_obj = nf_core.subworkflows.SubworkflowUpdate(
+            self.pipeline_dir, sha=OLD_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH
+        )
+
+        # Install the new files
+        install_dir = Path(tempfile.mkdtemp())
+        update_obj.install_component_files("bam_sort_stats_samtools", OLD_SHA, update_obj.modules_repo, install_dir)
+
+        # Try applying the patch
+        subworkflow_install_dir = install_dir / "bam_sort_stats_samtools"
+        patch_relpath = subworkflow_relpath / "bam_sort_stats_samtools.diff"
+        assert (
+            update_obj.try_apply_patch(
+                "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir
+            )
+            is True
+        )
+
+        # Move the files from the temporary directory
+        update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, OLD_SHA)
+
+        # Check that a patch file with the correct name has been created
+        assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
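+
+        # The patch was created at OLD_SHA and applied to a fresh install of the same
+        # revision, so the diff and the patched 'main.nf' should be reproduced verbatim.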
'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(subworkflow_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert "- ch_fasta // channel: [ fasta ]\n" not in main_nf_lines + + def test_create_patch_try_apply_failed(self): + """Test creating a patch file and applying it to a new version of the the files""" + self.setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / "bam_sort_stats_samtools.diff" + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is False + ) + + def test_create_patch_update_success(self): + """ + Test creating a patch file and the updating the subworkflow + + Should have the same effect as 'test_create_patch_try_apply_successful' + but uses higher level api + """ + self.setup_patch(self.pipeline_dir, True) + swf_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + patch_fn = "bam_sort_stats_samtools.diff" + # Check that a patch file with the correct name has been created + assert (swf_path / patch_fn).exists() + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn) + + # Update the subworkflow + update_obj = nf_core.subworkflows.update.SubworkflowUpdate( + self.pipeline_dir, + sha=OLD_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + ) + assert update_obj.update("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert (swf_path / patch_fn).exists() + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn), modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) + + # Check that the 
+        # Check that the correct lines are in the patch file
+        with open(swf_path / patch_fn) as fh:
+            patch_lines = fh.readlines()
+        swf_relpath = swf_path.relative_to(self.pipeline_dir)
+        assert f"--- {swf_relpath / 'main.nf'}\n" in patch_lines
+        assert f"+++ {swf_relpath / 'main.nf'}\n" in patch_lines
+        assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines
+
+        # Check that 'main.nf' is updated correctly
+        with open(swf_path / "main.nf") as fh:
+            main_nf_lines = fh.readlines()
+        # this line should have been removed by the patch
+        assert " ch_fasta // channel: [ fasta ]\n" not in main_nf_lines
+
+    def test_create_patch_update_fail(self):
+        """
+        Test creating a patch file and updating a subworkflow when there is a diff conflict
+        """
+        self.setup_patch(self.pipeline_dir, True)
+        swf_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+
+        # Try creating a patch file
+        patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH)
+        patch_obj.patch("bam_sort_stats_samtools")
+
+        patch_fn = "bam_sort_stats_samtools.diff"
+        # Check that a patch file with the correct name has been created
+        assert (swf_path / patch_fn).exists()
+
+        # Check the 'modules.json' contains a patch file for the subworkflow
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(
+            "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO
+        ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn)
+
+        # Save the file contents for downstream comparison
+        with open(swf_path / patch_fn) as fh:
+            patch_contents = fh.read()
+
+        update_obj = nf_core.subworkflows.update.SubworkflowUpdate(
+            self.pipeline_dir,
+            sha=FAIL_SHA,
+            show_diff=False,
+            update_deps=True,
+            remote_url=GITLAB_URL,
+            branch=GITLAB_SUBWORKFLOWS_BRANCH,
+        )
+        update_obj.update("bam_sort_stats_samtools")
+
+        # Check that the installed files have not been affected by the attempted patch
+        temp_dir = Path(tempfile.mkdtemp())
+        nf_core.components.components_command.ComponentCommand(
+            "subworkflows", self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH
+        ).install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, temp_dir)
+
+        temp_module_dir = temp_dir / "bam_sort_stats_samtools"
+        for file in os.listdir(temp_module_dir):
+            assert file in os.listdir(swf_path)
+            with open(swf_path / file) as fh:
+                installed = fh.read()
+            with open(temp_module_dir / file) as fh:
+                shouldbe = fh.read()
+            assert installed == shouldbe
+
+        # Check that the patch file is unaffected
+        with open(swf_path / patch_fn) as fh:
+            new_patch_contents = fh.read()
+        assert patch_contents == new_patch_contents
+
+    def test_remove_patch(self):
+        """Test removing a patch from a subworkflow"""
+        self.setup_patch(self.pipeline_dir, True)
+
+        # Try creating a patch file
+        patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH)
+        patch_obj.patch("bam_sort_stats_samtools")
+
+        subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+
+        # Check that a patch file with the correct name has been created
+        assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
+
+        with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary:
+            mock_questionary.unsafe_ask.return_value = True
+            patch_obj.remove("bam_sort_stats_samtools")
+        # Check that the diff file has been 
removed + assert not (subworkflow_path / "bam_sort_stats_samtools.diff").exists() diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py index 423eb516bc..63cc7c0efc 100644 --- a/tests/subworkflows/test_update.py +++ b/tests/subworkflows/test_update.py @@ -99,7 +99,7 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): with open(patch_path) as fh: line = fh.readline() assert line.startswith( - "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" + "Changes in component 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" ) def test_install_at_hash_and_update_and_save_diff_limit_output(self): diff --git a/tests/test_cli.py b/tests/test_cli.py index 026efd1e6a..8df1e210b0 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -167,7 +167,7 @@ def test_cli_download(self, mock_dl): "compress": "tar.gz", "force": None, "platform": None, - "download-configuration": None, + "download-configuration": "yes", "tag": "3.12=testing", "container-system": "singularity", "container-library": "quay.io", @@ -188,7 +188,7 @@ def test_cli_download(self, mock_dl): params["compress"], "force" in params, "platform" in params, - "download-configuration" in params, + params["download-configuration"], (params["tag"],), params["container-system"], (params["container-library"],), @@ -358,7 +358,7 @@ def test_schema_lint(self, mock_get_schema_path): with open("nextflow_schema.json", "w") as f: f.write("{}") self.invoke_cli(cmd) - mock_get_schema_path.assert_called_with("nextflow_schema.json") + mock_get_schema_path.assert_called_with(Path("nextflow_schema.json")) @mock.patch("nf_core.pipelines.schema.PipelineSchema.get_schema_path") def test_schema_lint_filename(self, mock_get_schema_path): @@ -368,7 +368,7 @@ def test_schema_lint_filename(self, mock_get_schema_path): with open("some_other_filename", "w") as f: f.write("{}") self.invoke_cli(cmd) - mock_get_schema_path.assert_called_with("some_other_filename") + mock_get_schema_path.assert_called_with(Path("some_other_filename")) @mock.patch("nf_core.pipelines.create_logo.create_logo") def test_create_logo(self, mock_create_logo): diff --git a/tests/test_modules.py b/tests/test_modules.py index 0e16497176..d0692236e8 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -7,7 +7,7 @@ import pytest import requests_cache import responses -import yaml +import ruamel.yaml import nf_core.modules import nf_core.modules.create @@ -16,6 +16,7 @@ import nf_core.modules.remove import nf_core.pipelines.create.create from nf_core import __version__ +from nf_core.pipelines.lint_utils import run_prettier_on_file from nf_core.utils import NFCoreYamlConfig from .utils import ( @@ -28,11 +29,15 @@ create_tmp_pipeline, mock_anaconda_api_calls, mock_biocontainers_api_calls, + mock_biotools_api_calls, ) def create_modules_repo_dummy(tmp_dir): """Create a dummy copy of the nf-core/modules repo""" + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.indent(mapping=2, sequence=2, offset=0) root_dir = Path(tmp_dir, "modules") Path(root_dir, "modules", "nf-core").mkdir(parents=True) @@ -42,13 +47,14 @@ def create_modules_repo_dummy(tmp_dir): nf_core_yml = NFCoreYamlConfig(nf_core_version=__version__, repository_type="modules", org_path="nf-core") with open(Path(root_dir, ".nf-core.yml"), "w") as fh: yaml.dump(nf_core_yml.model_dump(), fh) - # mock biocontainers and anaconda response + # mock biocontainers and anaconda response and biotools 
response with responses.RequestsMock() as rsps: mock_anaconda_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0") mock_biocontainers_api_calls(rsps, "bpipe", "0.9.13--hdfd78af_0") + mock_biotools_api_calls(rsps, "bpipe") # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules module_create = nf_core.modules.create.ModuleCreate( - root_dir, "bpipe/test", "@author", "process_single", False, False + root_dir, "bpipe/test", "@author", "process_single", True, False ) with requests_cache.disabled(): assert module_create.create() @@ -57,10 +63,11 @@ def create_modules_repo_dummy(tmp_dir): meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") with open(str(meta_yml_path)) as fh: - meta_yml = yaml.safe_load(fh) + meta_yml = yaml.load(fh) del meta_yml["tools"][0]["bpipe"]["doi"] with open(str(meta_yml_path), "w") as fh: yaml.dump(meta_yml, fh) + run_prettier_on_file(fh.name) # Add dummy content to main.nf.test.snap test_snap_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap") diff --git a/tests/test_pipelines.py b/tests/test_pipelines.py index 656ccbef55..455b2b71c2 100644 --- a/tests/test_pipelines.py +++ b/tests/test_pipelines.py @@ -1,6 +1,8 @@ import shutil from unittest import TestCase +import pytest + from nf_core.utils import Pipeline from .utils import create_tmp_pipeline @@ -24,3 +26,7 @@ def _make_pipeline_copy(self): new_pipeline = self.tmp_dir / "nf-core-testpipeline-copy" shutil.copytree(self.pipeline_dir, new_pipeline) return new_pipeline + + @pytest.fixture(autouse=True) + def _use_caplog(self, caplog): + self.caplog = caplog diff --git a/tests/test_utils.py b/tests/test_utils.py index bde561d95e..b13c8eb37d 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -113,7 +113,7 @@ def test_pip_package_pass(self): @mock.patch("requests.get") def test_pip_package_timeout(self, mock_get): """Tests the PyPi connection and simulates a request timeout, which should - return in an addiional warning in the linting""" + return in an additional warning in the linting""" # Define the behaviour of the request get mock mock_get.side_effect = requests.exceptions.Timeout() # Now do the test diff --git a/tests/utils.py b/tests/utils.py index 9a661c5927..cffe8ba103 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -100,13 +100,22 @@ def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module: str, vers rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200) +def mock_biotools_api_calls(rsps: responses.RequestsMock, module: str) -> None: + """Mock biotools api calls for module""" + biotools_api_url = f"https://bio.tools/api/t/?q={module}&format=json" + biotools_mock = { + "list": [{"name": "Bpipe", "biotoolsCURIE": "biotools:bpipe"}], + } + rsps.get(biotools_api_url, json=biotools_mock, status=200) + + def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]: """Create a new Pipeline for testing""" tmp_dir = Path(tempfile.TemporaryDirectory().name) root_repo_dir = Path(__file__).resolve().parent.parent template_dir = root_repo_dir / "nf_core" / "pipeline-template" - pipeline_name = "mypipeline" + pipeline_name = "testpipeline" pipeline_dir = tmp_dir / pipeline_name pipeline_dir.mkdir(parents=True) @@ -116,7 +125,7 @@ def create_tmp_pipeline(no_git: bool = False) -> Tuple[Path, Path, str, Path]: org_path="nf-core", lint=None, template=NFCoreTemplateConfig( - name="mypipeline", + name="testpipeline", author="me", description="it is mine", org="nf-core",