diff --git a/.editorconfig b/.editorconfig
index 014c2383bd..449f446a3b 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -10,3 +10,13 @@ indent_style = space
[*.{md,yml,yaml,html,css,scss,js,cff}]
indent_size = 2
+
+# ignore indentation in python, markdown and Makefile files
+[*.py]
+indent_style = unset
+
+[**/{CONTRIBUTING,README}.md]
+indent_style = unset
+
+[**/Makefile]
+indent_style = unset
diff --git a/.github/.coveragerc b/.github/.coveragerc
index 522a29eb62..24a419ae07 100644
--- a/.github/.coveragerc
+++ b/.github/.coveragerc
@@ -1,2 +1,5 @@
[run]
-omit = nf_core/pipeline-template/*
+omit = nf_core/*-template/*
+source = nf_core
+relative_files = True
+
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 75da414db6..04d327bd8c 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -35,45 +35,28 @@ pip install -e .
## Code formatting
-### Black
+### Ruff
-All Python code in nf-core/tools must be passed through the [Black Python code formatter](https://black.readthedocs.io/en/stable/).
+All Python code in nf-core/tools must be passed through the [Ruff code linter and formatter](https://github.com/astral-sh/ruff).
This ensures a harmonised code formatting style throughout the package, from all contributors.
-You can run Black on the command line (it's included in `requirements-dev.txt`) - eg. to run recursively on the whole repository:
+You can run Ruff on the command line (it's included in `requirements-dev.txt`) - e.g. to run recursively on the whole repository:
```bash
-black .
+ruff format .
```
-Alternatively, Black has [integrations for most common editors](https://black.readthedocs.io/en/stable/editor_integration.html)
+Alternatively, Ruff has [integrations for most common editors](https://github.com/astral-sh/ruff-lsp) and [VSCode](https://github.com/astral-sh/ruff-vscode)
to automatically format code when you hit save.
-You can also set it up to run when you [make a commit](https://black.readthedocs.io/en/stable/version_control_integration.html).
There is an automated CI check that runs when you open a pull-request to nf-core/tools that will fail if
-any code does not adhere to Black formatting.
+any code does not adhere to Ruff formatting.
-### isort
-
-All Python code must also be passed through [isort](https://pycqa.github.io/isort/index.html).
-This ensures a harmonised imports throughout the package, from all contributors.
-
-To run isort on the command line recursively on the whole repository you can use:
-
-```bash
-isort .
-```
-
-isort also has [plugins for most common editors](https://github.com/pycqa/isort/wiki/isort-Plugins)
-to automatically format code when you hit save.
-Or [version control integration](https://pycqa.github.io/isort/docs/configuration/pre-commit.html) to set it up to run when you make a commit.
-
-There is an automated CI check that runs when you open a pull-request to nf-core/tools that will fail if
-any code does not adhere to isort formatting.
+Ruff has been adopted for linting and formatting, replacing Black, isort (for import sorting) and pyupgrade. It also covers the checks previously provided by Flake8.
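+
+You can also run the lint rules (import sorting, pyupgrade-style fixes, Flake8-style checks) with autofixes applied - a typical invocation:
+
+```bash
+ruff check --fix .
+```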
### pre-commit hooks
-This repository comes with [pre-commit](https://pre-commit.com/) hooks for black, isort and Prettier. pre-commit automatically runs checks before a commit is committed into the git history. If all checks pass, the commit is made, if files are changed by the pre-commit hooks, the user is informed and has to stage the changes and attempt the commit again.
+This repository comes with [pre-commit](https://pre-commit.com/) hooks for Ruff and Prettier. pre-commit automatically runs checks before a commit is recorded in the git history. If all checks pass, the commit is made; if files are changed by the pre-commit hooks, the user is informed and has to stage the changes and attempt the commit again.
You can use the pre-commit hooks if you like, but you don't have to. The CI on Github will run the same checks as the tools installed with pre-commit. If the pre-commit checks pass, then the same checks in the CI will pass, too.
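+
+If you do want to use them, a minimal local setup looks like this (standard pre-commit usage; the CI installs and runs it the same way):
+
+```bash
+pip install pre-commit
+pre-commit install          # run the hooks automatically on every commit
+pre-commit run --all-files  # or check the whole repository once
+```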
diff --git a/.github/renovate.json5 b/.github/renovate.json5
index f9b377c615..8d123ab17a 100644
--- a/.github/renovate.json5
+++ b/.github/renovate.json5
@@ -1,5 +1,17 @@
{
$schema: "https://docs.renovatebot.com/renovate-schema.json",
extends: ["github>nf-core/ops//.github/renovate/default.json5"],
+ ignorePaths: ["**/nf_core/pipeline-template/modules/nf-core/**"],
baseBranches: ["dev"],
+ packageRules: [
+ {
+ matchDatasources: ["docker"],
+ matchPackageNames: ["python"],
+ versioning: "pep440",
+ },
+ {
+ matchDatasources: ["docker"],
+ registryUrls: ["docker.io"],
+ },
+ ],
}
diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml
index dd64ffa3e5..54dee6df16 100644
--- a/.github/workflows/branch.yml
+++ b/.github/workflows/branch.yml
@@ -18,7 +18,7 @@ jobs:
# If the above check failed, post a comment on the PR explaining the failure
- name: Post PR comment
if: failure()
- uses: mshick/add-pr-comment@v1
+ uses: mshick/add-pr-comment@v2
with:
message: |
## This PR is against the `master` branch :x:
diff --git a/.github/workflows/changelog.py b/.github/workflows/changelog.py
new file mode 100644
index 0000000000..eb56499c93
--- /dev/null
+++ b/.github/workflows/changelog.py
@@ -0,0 +1,228 @@
+"""
+Taken from https://github.com/MultiQC/MultiQC/blob/main/.github/workflows/changelog.py and updated for nf-core
+
+To be called by a CI action. Assumes the following environment variables are set:
+PR_TITLE, PR_NUMBER, GITHUB_WORKSPACE.
+
+Adds a line into the CHANGELOG.md:
+* Looks for the section to add the line to, based on the PR title, e.g. `Template:`, `Modules:`.
+* All other changes will go under the "### General" section.
+* If an entry for the PR is already added, it will not run.
+
+Other assumptions:
+- CHANGELOG.md has a running section for an ongoing "dev" version
+(i.e. with a version header like "# vX.Ydev").
+"""
+
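+# A local smoke test might look like this (hypothetical values; in CI the GitHub
+# action exports these variables):
+#
+#   PR_TITLE="Template: fix typo" PR_NUMBER="123" GITHUB_WORKSPACE="$(pwd)" \
+#     python .github/workflows/changelog.py
+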
+import os
+import re
+import sys
+from pathlib import Path
+from typing import List
+
+REPO_URL = "https://github.com/nf-core/tools"
+
+# Assumes the environment is set by the GitHub action.
+pr_title = os.environ["PR_TITLE"]
+pr_number = os.environ["PR_NUMBER"]
+comment = os.environ.get("COMMENT", "")
+workspace_path = Path(os.environ.get("GITHUB_WORKSPACE", ""))
+
+assert pr_title, pr_title
+assert pr_number, pr_number
+
+# Trim the PR number added when GitHub squashes commits, e.g. "Template: Updated (#2026)"
+pr_title = pr_title.removesuffix(f" (#{pr_number})")
+
+changelog_path = workspace_path / "CHANGELOG.md"
+
+if any(
+ line in pr_title.lower()
+ for line in [
+ "skip changelog",
+ "skip change log",
+ "no changelog",
+ "no change log",
+ "bump version",
+ ]
+):
+ print("Skipping changelog update")
+ sys.exit(0)
+
+
+def _determine_change_type(pr_title: str) -> tuple[str, str]:
+ """
+ Determine the type of the PR: Template, Download, Linting, Modules, Subworkflows, or General
+ Returns a tuple of the section name and the module info.
+ """
+ sections = {
+ "Template": "### Template",
+ "Download": "### Download",
+ "Linting": "### Linting",
+ "Modules": "### Modules",
+ "Subworkflows": "### Subworkflows",
+ }
+ current_section_header = "### General"
+ current_section = "General"
+
+ # Check if the PR title matches any of the sections.
+ for section, section_header in sections.items():
+ # check if the PR title contains any of the section headers, with some loose matching, e.g. removing plural and suffixes
+ if re.sub(r"s$", "", section.lower().replace("ing", "")) in pr_title.lower():
+ current_section_header = section_header
+ current_section = section
+ print(f"Detected section: {current_section}")
+ return current_section, current_section_header
+
+
+# Determine the type of the PR
+section, section_header = _determine_change_type(pr_title)
+
+# Remove section indicator from the PR title.
+pr_title = re.sub(rf"{section}[:\s]*", "", pr_title, flags=re.IGNORECASE)
+
+# Prepare the change log entry.
+pr_link = f"([#{pr_number}]({REPO_URL}/pull/{pr_number}))"
+
+# Handle manual changelog entries through comments.
+if comment := comment.removeprefix("@nf-core-bot changelog").strip():
+ print(f"Adding manual changelog entry: {comment}")
+ pr_title = comment
+new_lines = [
+ f"- {pr_title} {pr_link}\n",
+]
+
+print(f"Adding new lines into section '{section}':\n" + "".join(new_lines))
+
+# Finally, updating the changelog.
+# Read the current changelog lines. We will print them back as is, except for one new
+# entry, corresponding to this new PR.
+with changelog_path.open("r") as f:
+ orig_lines = f.readlines()
+updated_lines: List[str] = []
+
+
+def _skip_existing_entry_for_this_pr(line: str, same_section: bool = True) -> str:
+ if line.strip().endswith(pr_link):
+ print(f"Found existing entry for this pull request #{pr_number}:")
+ existing_lines = [line]
+ if new_lines and new_lines == existing_lines and same_section:
+ print(f"Found existing identical entry for this pull request #{pr_number} in the same section:")
+ print("".join(existing_lines))
+ sys.exit(0) # Just leaving the CHANGELOG intact
+ else:
+ print(
+ f"Found existing entry for this pull request #{pr_number}. It will be replaced and/or moved to proper section"
+ )
+ print("".join(existing_lines))
+ for _ in range(len(existing_lines)):
+ try:
+ line = orig_lines.pop(0)
+ except IndexError:
+ break
+ return line
+
+
+# Find the next line in the change log that matches the pattern "# nf-core/tools v.*dev"
+# If it doesn't exist, exit with code 1 (let's assume that a new section is added
+# manually or by CI when a release is pushed).
+# Else, find the next line that matches the `section` variable, and insert a new line
+# under it (we also assume that section headers are added already).
+inside_version_dev = False
+already_added_entry = False
+while orig_lines:
+ line = orig_lines.pop(0)
+
+ # If the line already contains a link to the PR, don't add it again.
+ line = _skip_existing_entry_for_this_pr(line, same_section=False)
+
+ if line.startswith("# ") and not line.strip() == "# nf-core/tools: Changelog": # Version header, e.g. "# v2.12dev"
+ print(f"Found version header: {line.strip()}")
+ updated_lines.append(line)
+
+ # Parse version from the line `# v2.12dev` or
+ # `# [v2.11.1 - Magnesium Dragon Patch](https://github.com/nf-core/tools/releases/tag/2.11) - [2023-12-20]` ...
+ if not (m := re.match(r".*(v\d+\.\d+(dev)?).*", line)):
+ print(f"Cannot parse version from line {line.strip()}.", file=sys.stderr)
+ sys.exit(1)
+ version = m.group(1)
+
+ if not inside_version_dev:
+ if not version.endswith("dev"):
+ print(
+ "Can't find a 'dev' version section in the changelog. Make sure "
+ "it's created, and all the required sections, e.g. `### Template` are created under it .",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ inside_version_dev = True
+ else:
+ if version.endswith("dev"):
+ print(
+ f"Found another 'dev' version section in the changelog, make"
+ f"sure to change it to a 'release' stable version tag. "
+ f"Line: {line.strip()}",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ # We are past the dev version, so just add back the rest of the lines and break.
+ while orig_lines:
+ line = orig_lines.pop(0)
+ line = _skip_existing_entry_for_this_pr(line, same_section=False)
+ if line:
+ updated_lines.append(line)
+ break
+ continue
+ print(f"Found line: {line.strip()}")
+ print(f"inside_version_dev: {inside_version_dev}")
+ print(f"section_header: {section_header}")
+ if inside_version_dev and line.lower().startswith(section_header.lower()): # Section of interest header
+ print(f"Found section header: {line.strip()}")
+ if already_added_entry:
+ print(
+ f"Already added new lines into section {section}, is the section duplicated?",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+ updated_lines.append(line)
+ # Collecting lines until the next section.
+ section_lines: List[str] = []
+ while True:
+ line = orig_lines.pop(0)
+ if line.startswith("#"):
+ print(f"Found the next section header: {line.strip()}")
+ # Found the next section header, so append all the lines we collected.
+ updated_lines.append("\n")
+ _updated_lines = [_l for _l in section_lines + new_lines if _l.strip()]
+ updated_lines.extend(_updated_lines)
+ updated_lines.append("\n")
+ if new_lines:
+ print(f"Updated {changelog_path} section '{section}' with lines:\n" + "".join(new_lines))
+ else:
+ print(f"Removed existing entry from {changelog_path} section '{section}'")
+ already_added_entry = True
+ # Pushing back the next section header line
+ orig_lines.insert(0, line)
+ break
+ # If the line already contains a link to the PR, don't add it again.
+ line = _skip_existing_entry_for_this_pr(line, same_section=True)
+ section_lines.append(line)
+ else:
+ updated_lines.append(line)
+
+
+def collapse_newlines(lines: List[str]) -> List[str]:
+ updated = []
+ for idx in range(len(lines)):
+ if idx != 0 and not lines[idx].strip() and not lines[idx - 1].strip():
+ continue
+ updated.append(lines[idx])
+ return updated
+
+
+updated_lines = collapse_newlines(updated_lines)
+
+
+# Finally, writing the updated lines back.
+with changelog_path.open("w") as f:
+ f.writelines(updated_lines)
diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml
new file mode 100644
index 0000000000..63dcf2e428
--- /dev/null
+++ b/.github/workflows/changelog.yml
@@ -0,0 +1,88 @@
+name: Update CHANGELOG.md
+on:
+ issue_comment:
+ types: [created]
+ pull_request_target:
+ types: [opened]
+
+jobs:
+ update_changelog:
+ runs-on: ubuntu-latest
+ # Run if the comment is on a PR in the main repo and contains the magic keywords.
+ # Or run on PR creation, unless asked otherwise in the title.
+ if: |
+ github.repository_owner == 'nf-core' && (
+ github.event_name == 'pull_request_target' ||
+ github.event.issue.pull_request && startsWith(github.event.comment.body, '@nf-core-bot changelog')
+ )
+
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }}
+
+ # Action runs on the issue comment, so we don't get the PR by default.
+ # Use the GitHub CLI to check out the PR:
+ - name: Checkout Pull Request
+ env:
+ GH_TOKEN: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }}
+ run: |
+ if [[ "${{ github.event_name }}" == "issue_comment" ]]; then
+ PR_NUMBER="${{ github.event.issue.number }}"
+ elif [[ "${{ github.event_name }}" == "pull_request_target" ]]; then
+ PR_NUMBER="${{ github.event.pull_request.number }}"
+ fi
+ gh pr checkout $PR_NUMBER
+
+ - uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install packages
+ run: |
+ python -m pip install --upgrade pip
+ pip install pyyaml
+
+ - name: Update CHANGELOG.md from the PR title
+ env:
+ COMMENT: ${{ github.event.comment.body }}
+ GH_TOKEN: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }}
+ run: |
+ if [[ "${{ github.event_name }}" == "issue_comment" ]]; then
+ export PR_NUMBER='${{ github.event.issue.number }}'
+ export PR_TITLE='${{ github.event.issue.title }}'
+ elif [[ "${{ github.event_name }}" == "pull_request_target" ]]; then
+ export PR_NUMBER='${{ github.event.pull_request.number }}'
+ export PR_TITLE='${{ github.event.pull_request.title }}'
+ fi
+ python ${GITHUB_WORKSPACE}/.github/workflows/changelog.py
+
+ - name: Check if CHANGELOG.md actually changed
+ run: |
+ git diff --exit-code ${GITHUB_WORKSPACE}/CHANGELOG.md || echo "changed=YES" >> $GITHUB_ENV
+ echo "File changed: ${{ env.changed }}"
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: 3.11
+ cache: "pip"
+
+ - name: Install pre-commit
+ run: pip install pre-commit
+
+ - name: Run pre-commit checks
+ if: env.changed == 'YES'
+ run: |
+ pre-commit run --all-files
+
+ - name: Commit and push changes
+ if: env.changed == 'YES'
+ run: |
+ git config user.email "core@nf-co.re"
+ git config user.name "nf-core-bot"
+ git config push.default upstream
+ git add ${GITHUB_WORKSPACE}/CHANGELOG.md
+ git status
+ git commit -m "[automated] Update CHANGELOG.md"
+ git push
diff --git a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml
index 4b55c5e4aa..ff311f9df8 100644
--- a/.github/workflows/clean-up.yml
+++ b/.github/workflows/clean-up.yml
@@ -10,7 +10,7 @@ jobs:
issues: write
pull-requests: write
steps:
- - uses: actions/stale@v7
+ - uses: actions/stale@v9
with:
stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days."
stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful."
diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml
index 57dbe86d65..0119efcd41 100644
--- a/.github/workflows/create-lint-wf.yml
+++ b/.github/workflows/create-lint-wf.yml
@@ -26,15 +26,22 @@ jobs:
- "23.04.0"
- "latest-everything"
steps:
+ - name: go to subdirectory and change nextflow workdir
+ run: |
+ mkdir -p create-lint-wf
+ cd create-lint-wf
+ export NXF_WORK=$(pwd)
+
# Get the repo code
- uses: actions/checkout@v4
name: Check out source-code repository
# Set up nf-core/tools
- name: Set up Python 3.11
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.11
+ cache: pip
- name: Install python dependencies
run: |
@@ -47,14 +54,6 @@ jobs:
with:
version: ${{ matrix.NXF_VER }}
- # Install the Prettier linting tools
- - uses: actions/setup-node@v4
- with:
- node-version: "20"
-
- - name: Install Prettier and editorconfig-checker
- run: npm install -g prettier editorconfig-checker
-
# Build a pipeline from the template
- name: nf-core create
run: |
@@ -73,11 +72,8 @@ jobs:
working-directory: create-lint-wf
# Run code style linting
- - name: Run Prettier --check
- run: prettier --check create-lint-wf/nf-core-testpipeline
-
- - name: Run ECLint check
- run: editorconfig-checker -exclude README.md $(find nf-core-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile')
+ - name: run pre-commit
+ run: pre-commit run --all-files
working-directory: create-lint-wf
# Update modules to the latest version
@@ -142,7 +138,11 @@ jobs:
- name: Upload log file artifact
if: ${{ always() }}
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: nf-core-log-file
+ name: nf-core-log-file-${{ matrix.NXF_VER }}
path: create-lint-wf/log.txt
+
+ - name: Cleanup work directory
+ run: sudo rm -rf create-lint-wf
+ if: always()
diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml
index 37cbf65c7d..3805c1a240 100644
--- a/.github/workflows/create-test-lint-wf-template.yml
+++ b/.github/workflows/create-test-lint-wf-template.yml
@@ -20,24 +20,37 @@ env:
jobs:
RunTestWorkflow:
- runs-on: ubuntu-latest
+ runs-on: ${{ matrix.runner }}
env:
NXF_ANSI_LOG: false
strategy:
matrix:
TEMPLATE:
- - "template_skip_all.yml"
- "template_skip_github_badges.yml"
- "template_skip_igenomes.yml"
- "template_skip_ci.yml"
- - "template_skip_nf_core_configs.yml"
+ runner: ["self-hosted"]
+ profile: ["self_hosted_runner"]
+ include:
+ - TEMPLATE: "template_skip_all.yml"
+ runner: ubuntu-latest
+ profile: "docker"
+ - TEMPLATE: "template_skip_nf_core_configs.yml"
+ runner: ubuntu-latest
+ profile: "docker"
steps:
+ - name: go to working directory
+ run: |
+ mkdir -p create-lint-wf-template
+ cd create-lint-wf-template
+ export NXF_WORK=$(pwd)
+
- uses: actions/checkout@v4
name: Check out source-code repository
- name: Set up Python 3.11
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.11
@@ -51,18 +64,6 @@ jobs:
with:
version: latest-everything
- # Install the Prettier linting tools
- - uses: actions/setup-node@v4
- with:
- node-version: "20"
-
- - name: Install Prettier
- run: npm install -g prettier
-
- # Install the editorconfig linting tools
- - name: Install editorconfig-checker
- run: npm install -g editorconfig-checker
-
# Create template files
- name: Create template skip all (except github)
run: |
@@ -95,7 +96,7 @@ jobs:
- name: run the pipeline
run: |
cd create-test-lint-wf
- nextflow run my-prefix-testpipeline -profile test,docker --outdir ./results
+ nextflow run my-prefix-testpipeline -profile test,${{matrix.profile}} --outdir ./results
# Remove results folder before linting
- name: remove results folder
@@ -107,11 +108,8 @@ jobs:
run: nf-core --log-file log.txt sync --dir create-test-lint-wf/my-prefix-testpipeline/
# Run code style linting
- - name: Run Prettier --check
- run: prettier --check create-test-lint-wf/my-prefix-testpipeline
-
- - name: Run ECLint check
- run: editorconfig-checker -exclude README.md $(find my-prefix-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile')
+ - name: Run pre-commit
+ run: pre-commit run --all-files
working-directory: create-test-lint-wf
# Remove TODO statements
@@ -145,7 +143,11 @@ jobs:
- name: Upload log file artifact
if: ${{ always() }}
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: nf-core-log-file
+ name: nf-core-log-file-${{ matrix.TEMPLATE }}
path: create-test-lint-wf/artifact_files.tar
+
+ - name: Cleanup work directory
+ run: sudo rm -rf create-test-lint-wf
+ if: always()
diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml
index 026b0a889b..e128e16a36 100644
--- a/.github/workflows/create-test-wf.yml
+++ b/.github/workflows/create-test-wf.yml
@@ -17,7 +17,7 @@ env:
jobs:
RunTestWorkflow:
- runs-on: ubuntu-latest
+ runs-on: self-hosted
env:
NXF_ANSI_LOG: false
strategy:
@@ -26,11 +26,17 @@ jobs:
- "23.04.0"
- "latest-everything"
steps:
+ - name: go to working directory
+ run: |
+ mkdir -p create-test-wf
+ cd create-test-wf
+ export NXF_WORK=$(pwd)
+
- uses: actions/checkout@v4
name: Check out source-code repository
- name: Set up Python 3.11
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.11
@@ -49,11 +55,16 @@ jobs:
mkdir create-test-wf && cd create-test-wf
export NXF_WORK=$(pwd)
nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain
- nextflow run nf-core-testpipeline -profile test,docker --outdir ./results
+ nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results
- name: Upload log file artifact
if: ${{ always() }}
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: nf-core-log-file
+ name: nf-core-log-file-${{ matrix.NXF_VER }}
path: create-test-wf/log.txt
+
+ - name: Cleanup work directory
+ run: sudo rm -rf create-test-wf
+ if: always()
diff --git a/.github/workflows/deploy-pypi.yml b/.github/workflows/deploy-pypi.yml
index 62c53508d8..8d3a154d80 100644
--- a/.github/workflows/deploy-pypi.yml
+++ b/.github/workflows/deploy-pypi.yml
@@ -17,7 +17,7 @@ jobs:
name: Check out source-code repository
- name: Set up Python 3.11
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.11
diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml
index 4184bc5e59..5f157b6b5a 100644
--- a/.github/workflows/fix-linting.yml
+++ b/.github/workflows/fix-linting.yml
@@ -10,13 +10,20 @@ jobs:
contains(github.event.comment.html_url, '/pull/') &&
contains(github.event.comment.body, '@nf-core-bot fix linting') &&
github.repository == 'nf-core/tools'
- runs-on: ubuntu-latest
+ runs-on: self-hosted
steps:
# Use the @nf-core-bot token to check out so we can push later
- uses: actions/checkout@v4
with:
token: ${{ secrets.nf_core_bot_auth_token }}
+ # indication that the linting is being fixed
+ - name: React on comment
+ uses: peter-evans/create-or-update-comment@v3
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: eyes
+
# Action runs on the issue comment, so we don't get the PR by default
# Use the gh cli to check out the PR
- name: Checkout Pull Request
@@ -24,33 +31,31 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}
- - uses: actions/setup-node@v4
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
with:
- node-version: "20"
+ python-version: 3.11
+ cache: "pip"
- - name: Install Prettier
- run: npm install -g prettier @prettier/plugin-php
+ - name: Install pre-commit
+ run: pip install pre-commit
- - name: Run 'prettier --write'
- run: prettier --write ${GITHUB_WORKSPACE}
+ - name: Run pre-commit
+ id: pre-commit
+ run: pre-commit run --all-files
+ continue-on-error: true
- - name: Run Black
- uses: psf/black@stable
+ # indication that the linting has finished
+ - name: react if linting finished successfully
+ if: steps.pre-commit.outcome == 'success'
+ uses: peter-evans/create-or-update-comment@v3
with:
- # Override to remove the default --check flag so that we make changes
- options: "--color"
-
- - name: Set up Python 3.11
- uses: actions/setup-python@v4
- with:
- python-version: 3.11
- - name: python-isort
- uses: isort/isort-action@v1.0.0
- with:
- isortVersion: "latest"
- requirementsFiles: "requirements.txt requirements-dev.txt"
+ comment-id: ${{ github.event.comment.id }}
+ reactions: hooray
- name: Commit & push changes
+ id: commit-and-push
+ if: steps.pre-commit.outcome == 'failure'
run: |
git config user.email "core@nf-co.re"
git config user.name "nf-core-bot"
@@ -59,3 +64,19 @@ jobs:
git status
git commit -m "[automated] Fix code linting"
git push
+
+ - name: react if linting errors were fixed
+ if: steps.commit-and-push.outcome == 'success'
+ uses: peter-evans/create-or-update-comment@v3
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: pencil2
+
+ - name: react if linting errors were not fixed
+ if: steps.commit-and-push.outcome == 'failure'
+ uses: peter-evans/create-or-update-comment@v3
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: x
+ body: |
+ @${{ github.actor }} I tried to fix the linting errors, but it didn't work. Please fix them manually.
diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml
index 23972c56f6..d9847dd365 100644
--- a/.github/workflows/lint-code.yml
+++ b/.github/workflows/lint-code.yml
@@ -13,119 +13,21 @@ concurrency:
cancel-in-progress: true
jobs:
- EditorConfig:
- runs-on: ["self-hosted"]
+ Pre-commit:
+ runs-on: ubuntu-latest
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
- - uses: actions/setup-node@v4
- with:
- node-version: "20"
-
- - name: Install editorconfig-checker
- run: npm install -g editorconfig-checker
-
- # Run editor config check only on files not covered by a linter
- - name: Run ECLint check
- run: editorconfig-checker -exclude README.md $(git ls-files | grep -v 'test\|.py\|md\|json\|yml\|yaml\|html\|css\|Makefile')
-
- Prettier:
- runs-on: ["self-hosted"]
- steps:
- - uses: actions/checkout@v4
-
- - uses: actions/setup-node@v4
- with:
- node-version: "20"
-
- - name: Install Prettier
- run: npm install -g prettier
-
- - name: Run Prettier --check
- run: prettier --check ${GITHUB_WORKSPACE}
-
- PythonBlack:
- runs-on: ["self-hosted"]
- steps:
- - uses: actions/checkout@v4
-
- - name: Check code lints with Black
- uses: psf/black@stable
-
- # If the above check failed, post a comment on the PR explaining the failure
- - name: Post PR comment
- if: failure()
- uses: mshick/add-pr-comment@v1
- with:
- message: |
- ## Python linting (`black`) is failing
-
- To keep the code consistent with lots of contributors, we run automated code consistency checks.
- To fix this CI test, please run:
-
- * Install [`black`](https://black.readthedocs.io/en/stable/): `pip install black`
- * Fix formatting errors in your pipeline: `black .`
-
- Once you push these changes the test should pass, and you can hide this comment :+1:
-
- We highly recommend setting up Black in your code editor so that this formatting is done automatically on save. Ask about it on Slack for help!
-
- Thanks again for your contribution!
- repo-token: ${{ secrets.GITHUB_TOKEN }}
- allow-repeats: false
-
- isort:
- runs-on: ["self-hosted"]
- steps:
- - name: Check out source-code repository
- uses: actions/checkout@v4
-
- name: Set up Python 3.11
- uses: actions/setup-python@v4
- with:
- python-version: 3.11
- - name: python-isort
- uses: isort/isort-action@v1.1.0
- with:
- isortVersion: "latest"
- requirementsFiles: "requirements.txt requirements-dev.txt"
-
- static-type-check:
- runs-on: ["self-hosted"]
- steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.11
cache: "pip"
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip -r requirements-dev.txt
- pip install -e .
+ - name: Install pre-commit
+ run: pip install pre-commit
- - name: Cache nf-test installation
- id: cache-software
- uses: actions/cache@v3
- with:
- path: |
- /usr/local/bin/nf-test
- /home/runner/.nf-test/nf-test.jar
- key: ${{ runner.os }}-${{ env.NFTEST_VER }}-nftest
-
- - name: Install nf-test
- if: steps.cache-software.outputs.cache-hit != 'true'
- run: |
- wget -qO- https://code.askimed.com/install/nf-test | bash
- sudo mv nf-test /usr/local/bin/
-
- - name: Get Python changed files
- id: changed-py-files
- uses: tj-actions/changed-files@v23
- with:
- files: |
- *.py
- **/*.py
- - name: Run if any of the listed files above is changed
- if: steps.changed-py-files.outputs.any_changed == 'true'
- run: mypy ${{ steps.changed-py-files.outputs.all_changed_files }}
+ - name: Run pre-commit
+ run: pre-commit run --all-files
diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml
index 1230bfc9d3..169a917d83 100644
--- a/.github/workflows/push_dockerhub_dev.yml
+++ b/.github/workflows/push_dockerhub_dev.yml
@@ -13,7 +13,7 @@ concurrency:
jobs:
push_dockerhub:
name: Push new Docker image to Docker Hub (dev)
- runs-on: self-hosted
+ runs-on: ubuntu-latest
# Only run for the nf-core repo, for releases and merged PRs
if: ${{ github.repository == 'nf-core/tools' }}
env:
diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml
index b6f3592165..8cec6a63d7 100644
--- a/.github/workflows/pytest.yml
+++ b/.github/workflows/pytest.yml
@@ -14,8 +14,9 @@ on:
- "CHANGELOG.md"
release:
types: [published]
+ workflow_dispatch:
-# Cancel if a newer run is started
+# Cancel if a newer run with the same workflow name is queued
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
@@ -25,7 +26,7 @@ env:
jobs:
setup:
- runs-on: ["ubuntu-latest"]
+ runs-on: "ubuntu-latest"
strategy:
matrix:
python-version: ["3.8", "3.11"]
@@ -44,20 +45,45 @@ jobs:
runner: ${{ matrix.runner }}
run-tests: ${{ steps.conditions.outputs.run-tests }}
+ # create a test matrix based on all python files in /tests
+ list_tests:
+ name: Get test file matrix
+ runs-on: "ubuntu-latest"
+ steps:
+ - uses: actions/checkout@v4
+ name: Check out source-code repository
+
+ - name: List tests
+ id: list_tests
+ run: |
+ echo "tests=$(find tests/test_* | tac | sed 's/tests\///g' | jq -R -s -c '{test: (split("\n")[:-1])}')" >> $GITHUB_OUTPUT
+ outputs:
+ tests: ${{ steps.list_tests.outputs.tests }}
+
test:
- name: Test with Python ${{ needs.setup.outputs.python-version }} on ${{ needs.setup.outputs.runner }}
- needs: setup
- if: ${{ needs.setup.outputs.run-tests}}
- runs-on: ${{ needs.setup.outputs.runner }}
+ name: Run ${{matrix.test}} with Python ${{ needs.setup.outputs.python-version }} on ${{ needs.setup.outputs.runner }}
+ needs: [setup, list_tests]
+ if: ${{ needs.setup.outputs.run-tests }}
+ runs-on: self-hosted
+ strategy:
+ matrix: ${{ fromJson(needs.list_tests.outputs.tests) }}
+ fail-fast: false # run all tests even if one fails
steps:
- - uses: actions/checkout@v2
+ - name: go to subdirectory and change nextflow workdir
+ run: |
+ mkdir -p pytest
+ cd pytest
+ export NXF_WORK=$(pwd)
+
+ - uses: actions/checkout@v4
name: Check out source-code repository
- name: Set up Python ${{ needs.setup.outputs.python-version }}
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v5
with:
python-version: ${{ needs.setup.outputs.python-version }}
cache: "pip"
+ token: ${{ secrets.GITHUB_TOKEN }}
- name: Install dependencies
run: |
@@ -71,16 +97,25 @@ jobs:
sudo apt remove -y git git-man
sudo add-apt-repository --remove ppa:git-core/ppa
sudo apt install -y git
+
- name: Get current date
id: date
run: echo "date=$(date +'%Y-%m')" >> $GITHUB_ENV
- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
- with:
- version: "latest-everything"
+
+ - name: Look if nf-test is already installed and write to env variable
+ id: check-nftest
+ run: |
+ if [ -f /usr/local/bin/nf-test ]; then
+ echo "nftest_installed=true" >> $GITHUB_ENV
+ else
+ echo "nftest_installed=false" >> $GITHUB_ENV
+ fi
- name: Cache nf-test installation
+ if: env.nftest_installed != 'true'
id: cache-software
uses: actions/cache@v3
with:
@@ -90,16 +125,70 @@ jobs:
key: ${{ runner.os }}-nftest-${{ env.date }}
- name: Install nf-test
- if: steps.cache-software.outputs.cache-hit != 'true'
+ if: steps.cache-software.outputs.cache-hit != 'true' && env.nftest_installed != 'true'
run: |
wget -qO- https://code.askimed.com/install/nf-test | bash
sudo mv nf-test /usr/local/bin/
+ - name: move coveragerc file up
+ run: |
+ mv .github/.coveragerc .
+
- name: Test with pytest
- run: python3 -m pytest tests/ --color=yes --cov-report=xml --cov-config=.github/.coveragerc --cov=nf_core
+ run: |
+ python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0 || exit_code=$?
+ # don't fail if no tests were collected, e.g. for test_licence.py
+ if [ "${exit_code}" -eq 5 ]; then
+ echo "No tests were collected"
+ exit 0
+ elif [ "${exit_code}" -ne 0 ]; then
+ echo "Tests failed with exit code ${exit_code}"
+ exit 1
+ fi
+
+ - name: Upload coverage
+ uses: actions/upload-artifact@v4
+ with:
+ name: coverage_${{ matrix.test }}
+ path: .coverage
+
+ coverage:
+ needs: test
+ runs-on: self-hosted
+ steps:
+ - name: go to subdirectory
+ run: |
+ mkdir -p pytest
+ cd pytest
+
+ - uses: actions/checkout@v4
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ env:
+ AGENT_TOOLSDIRECTORY: /opt/actions-runner/_work/tools/tools/
+ with:
+ python-version: 3.11
+ cache: "pip"
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip -r requirements-dev.txt
+ pip install -e .
+
+ - name: move coveragerc file up
+ run: |
+ mv .github/.coveragerc .
+
+ - name: Download all artifacts
+ uses: actions/download-artifact@v4
+ - name: Run coverage
+ run: |
+ coverage combine --keep coverage*/.coverage*
+ coverage report
+ coverage xml
- - uses: codecov/codecov-action@v1
- name: Upload code coverage report
+ - uses: codecov/codecov-action@v3
with:
- if: success()
- token: ${{ secrets.CODECOV_TOKEN }}
+ files: coverage.xml
+ env:
+ CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml
index f5f289b73e..e0981e8241 100644
--- a/.github/workflows/rich-codex.yml
+++ b/.github/workflows/rich-codex.yml
@@ -8,7 +8,7 @@ jobs:
- name: Check out the repo
uses: actions/checkout@v4
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.x
cache: pip
diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml
index 94f8ee54e2..9bbc12cf4b 100644
--- a/.github/workflows/sync.yml
+++ b/.github/workflows/sync.yml
@@ -16,7 +16,7 @@ concurrency:
jobs:
get-pipelines:
- runs-on: ubuntu-latest
+ runs-on: self-hosted
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
@@ -49,7 +49,7 @@ jobs:
fetch-depth: "0"
- name: Set up Python 3.11
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.11
@@ -78,7 +78,7 @@ jobs:
- name: Upload sync log file artifact
if: ${{ always() }}
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: sync_log_${{ matrix.pipeline }}
path: sync_log_${{ matrix.pipeline }}.txt
diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml
index 51c25fa250..91396b2a25 100644
--- a/.github/workflows/tools-api-docs-dev.yml
+++ b/.github/workflows/tools-api-docs-dev.yml
@@ -20,14 +20,14 @@ concurrency:
jobs:
api-docs:
name: Build & push Sphinx API docs
- runs-on: self-hosted
+ runs-on: ubuntu-latest
steps:
- name: Check out source-code repository
uses: actions/checkout@v4
- name: Set up Python 3.11
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.11
@@ -43,7 +43,7 @@ jobs:
- name: Sync dev docs
# Only sync with the website if it was a push from nf-core/tools dev branch
if: github.repository == 'nf-core/tools' && github.event_name == 'push' && github.event.ref == 'refs/heads/dev'
- uses: SamKirkland/FTP-Deploy-Action@4.0.0
+ uses: SamKirkland/FTP-Deploy-Action@v4.3.4
with:
server: ${{ secrets.ftp_server }}
username: ${{ secrets.ftp_username}}
diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml
index b0869190d9..2183db3fcf 100644
--- a/.github/workflows/tools-api-docs-release.yml
+++ b/.github/workflows/tools-api-docs-release.yml
@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@v4
- name: Set up Python 3.11
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.11
@@ -37,7 +37,7 @@ jobs:
- name: Sync release docs
if: github.repository == 'nf-core/tools'
- uses: SamKirkland/FTP-Deploy-Action@4.0.0
+ uses: SamKirkland/FTP-Deploy-Action@v4.3.4
with:
server: ${{ secrets.ftp_server }}
username: ${{ secrets.ftp_username}}
diff --git a/.gitpod.yml b/.gitpod.yml
index 899f58e556..39fb530da2 100644
--- a/.gitpod.yml
+++ b/.gitpod.yml
@@ -1,4 +1,4 @@
-image: nfcore/gitpod:latest
+image: nfcore/gitpod:dev
tasks:
- name: install current state of nf-core/tools and setup pre-commit
command: |
@@ -9,6 +9,7 @@ tasks:
- name: unset JAVA_TOOL_OPTIONS
command: |
unset JAVA_TOOL_OPTIONS
+
vscode:
extensions: # based on nf-core.nf-core-extensionpack
- codezombiech.gitignore # Language support for .gitignore files
@@ -21,3 +22,4 @@ vscode:
# - nextflow.nextflow # Nextflow syntax highlighting
- oderwat.indent-rainbow # Highlight indentation level
- streetsidesoftware.code-spell-checker # Spelling checker for source code
+ - charliermarsh.ruff # Code linter Ruff
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ad23a3c895..56ebb5bfaa 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,23 +1,23 @@
repos:
- - repo: https://github.com/psf/black
- rev: 23.1.0
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.1.13
hooks:
- - id: black
- - repo: https://github.com/pycqa/isort
- rev: 5.12.0
- hooks:
- - id: isort
+ - id: ruff # linter
+ args: [--fix, --exit-non-zero-on-fix] # sort imports and fix
+ - id: ruff-format # formatter
- repo: https://github.com/pre-commit/mirrors-prettier
- rev: "v2.7.1"
+ rev: "v3.1.0"
hooks:
- id: prettier
- - repo: https://github.com/asottile/pyupgrade
- rev: v3.15.0
+
+ - repo: https://github.com/editorconfig-checker/editorconfig-checker.python
+ rev: "2.7.3"
hooks:
- - id: pyupgrade
- args: [--py38-plus]
+ - id: editorconfig-checker
+ alias: ec
+
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: "v1.7.1" # Use the sha / tag you want to point at
+ rev: "v1.8.0"
hooks:
- id: mypy
additional_dependencies:
diff --git a/.prettierignore b/.prettierignore
index 344cafca6e..a55074abfb 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -6,3 +6,7 @@ testing
nf_core/module-template/meta.yml
nf_core/module-template/tests/tags.yml
nf_core/subworkflow-template/tests/tags.yml
+# don't run prettier on files handled by ruff
+*.py
+*.pyc
+
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 28c02ea066..636be48c56 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,35 @@
# nf-core/tools: Changelog
+# v2.12dev
+
+### Template
+
+- Add a Github Action Workflow to the pipeline template that tests a successful download with 'nf-core download' ([#2618](https://github.com/nf-core/tools/pull/2618))
+- Use `pre-commit` to lint files in GitHub CI ([#2635](https://github.com/nf-core/tools/pull/2635))
+- Use pdiff also on gitpod for nf-test ([#2640](https://github.com/nf-core/tools/pull/2640))
+- switch to new image syntax in readme ([#2645](https://github.com/nf-core/tools/pull/2645))
+- Add conda channel order to nextflow.config ([#2094](https://github.com/nf-core/tools/pull/2094))
+
+### Download
+
+### Linting
+
+### Modules
+
+### Subworkflows
+
+### General
+
+- Run CI-pytests for nf-core tools on self-hosted runners ([#2550](https://github.com/nf-core/tools/pull/2550))
+- Add Ruff linter and formatter replacing Black, isort and pyupgrade ([#2620](https://github.com/nf-core/tools/pull/2620))
+- Update pre-commit hook pre-commit/mirrors-mypy to v1.8.0 ([#2630](https://github.com/nf-core/tools/pull/2630))
+- Update mshick/add-pr-comment action to v2 ([#2632](https://github.com/nf-core/tools/pull/2632))
+- update python image version in docker file ([#2636](https://github.com/nf-core/tools/pull/2636))
+- Set pdiff as nf-test differ in Docker image for Gitpod ([#2642](https://github.com/nf-core/tools/pull/2642))
+- Fix Renovate Dockerfile updating issues ([#2648](https://github.com/nf-core/tools/pull/2648) and [#2651](https://github.com/nf-core/tools/pull/2651))
+- Add new subcommand `nf-core tui`, which launches a TUI (terminal user interface) to intuitively explore the command line flags, built using [Trogon](https://github.com/Textualize/trogon) ([#2655](https://github.com/nf-core/tools/pull/2655))
+- Update pre-commit hook astral-sh/ruff-pre-commit to v0.1.13 ([#2660](https://github.com/nf-core/tools/pull/2660))
+
# [v2.11.1 - Magnesium Dragon Patch](https://github.com/nf-core/tools/releases/tag/2.11) - [2023-12-20]
### Template
diff --git a/Dockerfile b/Dockerfile
index 95d544b26f..9c9770c25f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.11.5-slim
+FROM python:3.11-slim
LABEL authors="phil.ewels@scilifelab.se,erik.danielsson@scilifelab.se" \
description="Docker image containing requirements for the nfcore tools"
diff --git a/README.md b/README.md
index 653f5295ea..c612c442b8 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,14 @@
-#  
+
+
+
+
+
+
[](https://github.com/nf-core/tools/actions?query=workflow%3A%22Python+tests%22+branch%3Amaster)
[](https://codecov.io/gh/nf-core/tools)
-[](https://github.com/psf/black)
[](https://github.com/prettier/prettier)
-[](https://pycqa.github.io/isort/)
+[](https://github.com/charliermarsh/ruff)
[](https://bioconda.github.io/recipes/nf-core/README.html)
[](https://pypi.org/project/nf-core/)
@@ -28,6 +32,7 @@ A python package with helper tools for the nf-core community.
- [`nf-core schema` - Work with pipeline schema files](#pipeline-schema)
- [`nf-core bump-version` - Update nf-core pipeline version number](#bumping-a-pipeline-version-number)
- [`nf-core sync` - Synchronise pipeline TEMPLATE branches](#sync-a-pipeline-with-the-template)
+- [`nf-core tui` - Explore the nf-core command line graphically](#tools-cli-tui)
- [`nf-core modules` - commands for dealing with DSL2 modules](#modules)
- [`modules list` - List available modules](#list-modules)
@@ -727,6 +732,17 @@ To create the pull request, a personal access token is required for API authenti
These can be created at [https://github.com/settings/tokens](https://github.com/settings/tokens).
Supply this using the `--auth-token` flag.
+## Tools CLI TUI
+
+_CLI:_ Command line interface
+_TUI:_ Terminal user interface
+
+The `nf-core` command line interface is fairly large, with a lot of commands and options.
+To make it easier to explore and use, run `nf-core tui` to launch a graphical terminal interface.
+
+This functionality works using [Textualize/trogon](https://github.com/Textualize/trogon)
+and is based on the underlying CLI implementation that uses [Click](https://click.palletsprojects.com/).
+
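+For example, to open the interface from a terminal where nf-core/tools is installed:
+
+```bash
+nf-core tui
+```
+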
## Modules
With the advent of [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html), we are creating a centralised repository of modules.
diff --git a/codecov.yml b/codecov.yml
index 1ecf8960c0..11a63f8bbf 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -4,3 +4,6 @@ coverage:
default:
threshold: 5%
patch: off
+comment:
+ layout: "condensed_header, condensed_files, condensed_footer"
+ require_changes: true
diff --git a/docs/api/Makefile b/docs/api/Makefile
index f961e4ded1..ab30a5051e 100644
--- a/docs/api/Makefile
+++ b/docs/api/Makefile
@@ -16,4 +16,4 @@ help:
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py
index 27eaf9bcb3..bfdbd7888d 100644
--- a/docs/api/_src/conf.py
+++ b/docs/api/_src/conf.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
@@ -16,9 +15,10 @@
import sys
from typing import Dict
-sys.path.insert(0, os.path.abspath("../../../nf_core"))
import nf_core
+sys.path.insert(0, os.path.abspath("../../../nf_core"))
+
# -- Project information -----------------------------------------------------
project = "nf-core/tools"
diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index 8cfacf7399..6dadadf22b 100644
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -3,12 +3,14 @@
import logging
import os
import sys
+from pathlib import Path
import rich
import rich.console
import rich.logging
import rich.traceback
import rich_click as click
+from trogon import tui
from nf_core import __version__
from nf_core.download import DownloadError
@@ -30,11 +32,27 @@
"nf-core": [
{
"name": "Commands for users",
- "commands": ["list", "launch", "create-params-file", "download", "licences"],
+ "commands": [
+ "list",
+ "launch",
+ "create-params-file",
+ "download",
+ "licences",
+ "tui",
+ ],
},
{
"name": "Commands for developers",
- "commands": ["create", "lint", "modules", "subworkflows", "schema", "bump-version", "sync"],
+ "commands": [
+ "create",
+ "lint",
+ "modules",
+ "subworkflows",
+ "schema",
+ "bump-version",
+ "sync",
+ "rocrate",
+ ],
},
],
"nf-core modules": [
@@ -89,10 +107,22 @@ def run_nf_core():
if os.environ.get("_NF_CORE_COMPLETE") is None:
# Print nf-core header
stderr.print(f"\n[green]{' ' * 42},--.[grey39]/[green],-.", highlight=False)
- stderr.print("[blue] ___ __ __ __ ___ [green]/,-._.--~\\", highlight=False)
- stderr.print(r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {", highlight=False)
- stderr.print(r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", highlight=False)
- stderr.print("[green] `._,._,'\n", highlight=False)
+ stderr.print(
+ "[blue] ___ __ __ __ ___ [green]/,-._.--~\\",
+ highlight=False,
+ )
+ stderr.print(
+ r"[blue] |\ | |__ __ / ` / \ |__) |__ [yellow] } {",
+ highlight=False,
+ )
+ stderr.print(
+ r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,",
+ highlight=False,
+ )
+ stderr.print(
+ "[green] `._,._,'\n",
+ highlight=False,
+ )
stderr.print(
f"[grey39] nf-core/tools version {__version__} - [link=https://nf-co.re]https://nf-co.re[/]",
highlight=False,
@@ -111,9 +141,16 @@ def run_nf_core():
nf_core_cli(auto_envvar_prefix="NFCORE")
+@tui()
@click.group(context_settings=dict(help_option_names=["-h", "--help"]))
@click.version_option(__version__)
-@click.option("-v", "--verbose", is_flag=True, default=False, help="Print verbose output to the console.")
+@click.option(
+ "-v",
+ "--verbose",
+ is_flag=True,
+ default=False,
+ help="Print verbose output to the console.",
+)
@click.option("--hide-progress", is_flag=True, default=False, help="Don't show progress bars.")
@click.option("-l", "--log-file", help="Save a verbose log to a file.", metavar="")
@click.pass_context
@@ -180,7 +217,11 @@ def list_pipelines(keywords, sort, json, show_archived):
@click.option("-r", "--revision", help="Release/branch/SHA of the project to run (if remote)")
@click.option("-i", "--id", help="ID for web-gui launch parameter set")
@click.option(
- "-c", "--command-only", is_flag=True, default=False, help="Create Nextflow command with params (no params file)"
+ "-c",
+ "--command-only",
+ is_flag=True,
+ default=False,
+ help="Create Nextflow command with params (no params file)",
)
@click.option(
"-o",
@@ -190,18 +231,43 @@ def list_pipelines(keywords, sort, json, show_archived):
help="Path to save run parameters file",
)
@click.option(
- "-p", "--params-in", type=click.Path(exists=True), help="Set of input run params to use from a previous run"
+ "-p",
+ "--params-in",
+ type=click.Path(exists=True),
+ help="Set of input run params to use from a previous run",
)
@click.option(
- "-a", "--save-all", is_flag=True, default=False, help="Save all parameters, even if unchanged from default"
+ "-a",
+ "--save-all",
+ is_flag=True,
+ default=False,
+ help="Save all parameters, even if unchanged from default",
)
@click.option(
- "-x", "--show-hidden", is_flag=True, default=False, help="Show hidden params which don't normally need changing"
+ "-x",
+ "--show-hidden",
+ is_flag=True,
+ default=False,
+ help="Show hidden params which don't normally need changing",
)
@click.option(
- "-u", "--url", type=str, default="https://nf-co.re/launch", help="Customise the builder URL (for development work)"
+ "-u",
+ "--url",
+ type=str,
+ default="https://nf-co.re/launch",
+ help="Customise the builder URL (for development work)",
)
-def launch(pipeline, id, revision, command_only, params_in, params_out, save_all, show_hidden, url):
+def launch(
+ pipeline,
+ id,
+ revision,
+ command_only,
+ params_in,
+ params_out,
+ save_all,
+ show_hidden,
+ url,
+):
"""
Launch a pipeline using a web GUI or command line prompts.
@@ -217,7 +283,17 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all
"""
from nf_core.launch import Launch
- launcher = Launch(pipeline, revision, command_only, params_in, params_out, save_all, show_hidden, url, id)
+ launcher = Launch(
+ pipeline,
+ revision,
+ command_only,
+ params_in,
+ params_out,
+ save_all,
+ show_hidden,
+ url,
+ id,
+ )
if not launcher.launch_pipeline():
sys.exit(1)
@@ -236,7 +312,11 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all
)
@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files")
@click.option(
- "-x", "--show-hidden", is_flag=True, default=False, help="Show hidden params which don't normally need changing"
+ "-x",
+ "--show-hidden",
+ is_flag=True,
+ default=False,
+ help="Show hidden params which don't normally need changing",
)
def create_params_file(pipeline, revision, output, force, show_hidden):
"""
@@ -267,10 +347,19 @@ def create_params_file(pipeline, revision, output, force, show_hidden):
)
@click.option("-o", "--outdir", type=str, help="Output directory")
@click.option(
- "-x", "--compress", type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), help="Archive compression type"
+ "-x",
+ "--compress",
+ type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]),
+ help="Archive compression type",
)
@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files")
-@click.option("-t", "--tower", is_flag=True, default=False, help="Download for seqeralabs® Nextflow Tower")
+@click.option(
+ "-t",
+ "--tower",
+ is_flag=True,
+ default=False,
+ help="Download for seqeralabs® Nextflow Tower",
+)
@click.option(
"-d",
"--download-configuration",
@@ -304,7 +393,13 @@ def create_params_file(pipeline, revision, output, force, show_hidden):
type=str,
help="List of images already available in a remote `singularity.cacheDir`.",
)
-@click.option("-p", "--parallel-downloads", type=int, default=4, help="Number of parallel image downloads")
+@click.option(
+ "-p",
+ "--parallel-downloads",
+ type=int,
+ default=4,
+ help="Number of parallel image downloads",
+)
def download(
pipeline,
revision,
@@ -378,7 +473,13 @@ def licences(pipeline, json):
@click.option("-d", "--description", type=str, help="A short description of your pipeline")
@click.option("-a", "--author", type=str, help="Name of the main author(s)")
@click.option("--version", type=str, default="1.0dev", help="The initial version number to use")
-@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite output directory if it already exists")
+@click.option(
+ "-f",
+ "--force",
+ is_flag=True,
+ default=False,
+ help="Overwrite output directory if it already exists",
+)
@click.option("-o", "--outdir", help="Output directory for new pipeline (default: pipeline name)")
@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template")
@click.option("--plain", is_flag=True, help="Use the standard nf-core template")
@@ -426,14 +527,36 @@ def create(name, description, author, version, force, outdir, template_yaml, pla
help="Execute additional checks for release-ready workflows.",
)
@click.option(
- "-f", "--fix", type=str, metavar="", multiple=True, help="Attempt to automatically fix specified lint test"
+ "-f",
+ "--fix",
+ type=str,
+ metavar="",
+ multiple=True,
+ help="Attempt to automatically fix specified lint test",
+)
+@click.option(
+ "-k",
+ "--key",
+ type=str,
+ metavar="",
+ multiple=True,
+ help="Run only these lint tests",
)
-@click.option("-k", "--key", type=str, metavar="", multiple=True, help="Run only these lint tests")
@click.option("-p", "--show-passed", is_flag=True, help="Show passing tests on the command line")
@click.option("-i", "--fail-ignored", is_flag=True, help="Convert ignored tests to failures")
@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures")
-@click.option("--markdown", type=str, metavar="", help="File to write linting results to (Markdown)")
-@click.option("--json", type=str, metavar="", help="File to write linting results to (JSON)")
+@click.option(
+ "--markdown",
+ type=str,
+ metavar="",
+ help="File to write linting results to (Markdown)",
+)
+@click.option(
+ "--json",
+ type=str,
+ metavar="",
+ help="File to write linting results to (JSON)",
+)
@click.option(
"--sort-by",
type=click.Choice(["module", "test"]),
@@ -442,7 +565,19 @@ def create(name, description, author, version, force, outdir, template_yaml, pla
show_default=True,
)
@click.pass_context
-def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, markdown, json, sort_by):
+def lint(
+ ctx,
+ dir,
+ release,
+ fix,
+ key,
+ show_passed,
+ fail_ignored,
+ fail_warned,
+ markdown,
+ json,
+ sort_by,
+):
"""
Check pipeline code against nf-core guidelines.
@@ -497,7 +632,13 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma
default=NF_CORE_MODULES_REMOTE,
help="Remote git repo to fetch files from",
)
-@click.option("-b", "--branch", type=str, default=None, help="Branch of git repository hosting modules.")
+@click.option(
+ "-b",
+ "--branch",
+ type=str,
+ default=None,
+ help="Branch of git repository hosting modules.",
+)
@click.option(
"-N",
"--no-pull",
@@ -529,7 +670,13 @@ def modules(ctx, git_remote, branch, no_pull):
default=NF_CORE_MODULES_REMOTE,
help="Remote git repo to fetch files from",
)
-@click.option("-b", "--branch", type=str, default=None, help="Branch of git repository hosting modules.")
+@click.option(
+ "-b",
+ "--branch",
+ type=str,
+ default=None,
+ help="Branch of git repository hosting modules.",
+)
@click.option(
"-N",
"--no-pull",
@@ -630,8 +777,20 @@ def modules_list_local(ctx, keywords, json, dir): # pylint: disable=redefined-b
default=".",
help=r"Pipeline directory. [dim]\[default: current working directory][/]",
)
-@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module")
-@click.option("-f", "--force", is_flag=True, default=False, help="Force reinstallation of module if it already exists")
+@click.option(
+ "-p",
+ "--prompt",
+ is_flag=True,
+ default=False,
+ help="Prompt for the version of the module",
+)
+@click.option(
+ "-f",
+ "--force",
+ is_flag=True,
+ default=False,
+ help="Force reinstallation of module if it already exists",
+)
@click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA")
def modules_install(ctx, tool, dir, prompt, force, sha):
"""
@@ -672,10 +831,21 @@ def modules_install(ctx, tool, dir, prompt, force, sha):
help=r"Pipeline directory. [dim]\[default: current working directory][/]",
)
@click.option("-f", "--force", is_flag=True, default=False, help="Force update of module")
-@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module")
+@click.option(
+ "-p",
+ "--prompt",
+ is_flag=True,
+ default=False,
+ help="Prompt for the version of the module",
+)
@click.option("-s", "--sha", type=str, metavar="", help="Install module at commit SHA")
@click.option(
- "-a", "--all", "install_all", is_flag=True, default=False, help="Update all modules installed in pipeline"
+ "-a",
+ "--all",
+ "install_all",
+ is_flag=True,
+ default=False,
+ help="Update all modules installed in pipeline",
)
@click.option(
"-x/-y",
@@ -699,7 +869,18 @@ def modules_install(ctx, tool, dir, prompt, force, sha):
default=False,
help="Automatically update all linked modules and subworkflows without asking for confirmation",
)
-def modules_update(ctx, tool, directory, force, prompt, sha, install_all, preview, save_diff, update_deps):
+def modules_update(
+ ctx,
+ tool,
+ directory,
+ force,
+ prompt,
+ sha,
+ install_all,
+ preview,
+ save_diff,
+ update_deps,
+):
"""
Update DSL2 modules within a pipeline.
@@ -801,13 +982,55 @@ def modules_remove(ctx, dir, tool):
@click.pass_context
@click.argument("tool", type=str, required=False, metavar=" or ")
@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="")
-@click.option("-a", "--author", type=str, metavar="", help="Module author's GitHub username prefixed with '@'")
-@click.option("-l", "--label", type=str, metavar="", help="Standard resource label for process")
-@click.option("-m", "--meta", is_flag=True, default=False, help="Use Groovy meta map for sample information")
-@click.option("-n", "--no-meta", is_flag=True, default=False, help="Don't use meta map for sample information")
-@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist")
-@click.option("-c", "--conda-name", type=str, default=None, help="Name of the conda package to use")
-@click.option("-p", "--conda-package-version", type=str, default=None, help="Version of conda package to use")
+@click.option(
+ "-a",
+ "--author",
+ type=str,
+ metavar="",
+ help="Module author's GitHub username prefixed with '@'",
+)
+@click.option(
+ "-l",
+ "--label",
+ type=str,
+ metavar="",
+ help="Standard resource label for process",
+)
+@click.option(
+ "-m",
+ "--meta",
+ is_flag=True,
+ default=False,
+ help="Use Groovy meta map for sample information",
+)
+@click.option(
+ "-n",
+ "--no-meta",
+ is_flag=True,
+ default=False,
+ help="Don't use meta map for sample information",
+)
+@click.option(
+ "-f",
+ "--force",
+ is_flag=True,
+ default=False,
+ help="Overwrite any files if they already exist",
+)
+@click.option(
+ "-c",
+ "--conda-name",
+ type=str,
+ default=None,
+ help="Name of the conda package to use",
+)
+@click.option(
+ "-p",
+ "--conda-package-version",
+ type=str,
+ default=None,
+ help="Version of conda package to use",
+)
@click.option(
"-i",
"--empty-template",
@@ -815,7 +1038,12 @@ def modules_remove(ctx, dir, tool):
default=False,
help="Create a module from the template without TODOs or examples",
)
-@click.option("--migrate-pytest", is_flag=True, default=False, help="Migrate a module with pytest tests to nf-test")
+@click.option(
+ "--migrate-pytest",
+ is_flag=True,
+ default=False,
+ help="Migrate a module with pytest tests to nf-test",
+)
def create_module(
ctx,
tool,
@@ -853,7 +1081,16 @@ def create_module(
# Run function
try:
module_create = ModuleCreate(
- dir, tool, author, label, has_meta, force, conda_name, conda_package_version, empty_template, migrate_pytest
+ dir,
+ tool,
+ author,
+ label,
+ has_meta,
+ force,
+ conda_name,
+ conda_package_version,
+ empty_template,
+ migrate_pytest,
)
module_create.create()
except UserWarning as e:
@@ -868,10 +1105,28 @@ def create_module(
@modules.command("test")
@click.pass_context
@click.argument("tool", type=str, required=False, metavar=" or ")
-@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="")
-@click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting")
+@click.option(
+ "-d",
+ "--dir",
+ type=click.Path(exists=True),
+ default=".",
+ metavar="",
+)
+@click.option(
+ "-p",
+ "--no-prompts",
+ is_flag=True,
+ default=False,
+ help="Use defaults without prompting",
+)
@click.option("-u", "--update", is_flag=True, default=False, help="Update existing snapshots")
-@click.option("-o", "--once", is_flag=True, default=False, help="Run tests only once. Don't check snapshot stability")
+@click.option(
+ "-o",
+ "--once",
+ is_flag=True,
+ default=False,
+ help="Run tests only once. Don't check snapshot stability",
+)
def test_module(ctx, tool, dir, no_prompts, update, once):
"""
Run nf-test for a module.
@@ -902,7 +1157,13 @@ def test_module(ctx, tool, dir, no_prompts, update, once):
@modules.command("lint")
@click.pass_context
@click.argument("tool", type=str, required=False, metavar=" or ")
-@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="")
+@click.option(
+ "-d",
+ "--dir",
+ type=click.Path(exists=True),
+ default=".",
+ metavar="",
+)
@click.option(
"-r",
"--registry",
@@ -911,7 +1172,14 @@ def test_module(ctx, tool, dir, no_prompts, update, once):
default=None,
help="Registry to use for containers. If not specified it will use docker.registry value in the nextflow.config file",
)
-@click.option("-k", "--key", type=str, metavar="", multiple=True, help="Run only these lint tests")
+@click.option(
+ "-k",
+ "--key",
+ type=str,
+ metavar="",
+ multiple=True,
+ help="Run only these lint tests",
+)
@click.option("-a", "--all", is_flag=True, help="Run on all modules")
@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures")
@click.option("--local", is_flag=True, help="Run additional lint tests for local modules")
@@ -923,7 +1191,11 @@ def test_module(ctx, tool, dir, no_prompts, update, once):
help="Sort lint output by module or test name.",
show_default=True,
)
-@click.option("--fix-version", is_flag=True, help="Fix the module version if a newer version is available")
+@click.option(
+ "--fix-version",
+ is_flag=True,
+ help="Fix the module version if a newer version is available",
+)
def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version):
"""
Lint one or more modules in a directory.
@@ -934,7 +1206,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed,
Test modules within a pipeline or a clone of the
nf-core/modules repository.
"""
- from nf_core.components.lint import LintException
+ from nf_core.components.lint import LintExceptionError
from nf_core.modules import ModuleLint
try:
@@ -960,7 +1232,7 @@ def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed,
)
if len(module_lint.failed) > 0:
sys.exit(1)
- except LintException as e:
+ except LintExceptionError as e:
log.error(e)
sys.exit(1)
except (UserWarning, LookupError) as e:
@@ -1011,7 +1283,13 @@ def modules_info(ctx, tool, dir):
@modules.command()
@click.pass_context
@click.argument("tool", type=str, required=False, metavar=" or ")
-@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="")
+@click.option(
+ "-d",
+ "--dir",
+ type=click.Path(exists=True),
+ default=".",
+ metavar="",
+)
@click.option("-a", "--all", is_flag=True, help="Run on all modules")
@click.option("-s", "--show-all", is_flag=True, help="Show up-to-date modules in results too")
def bump_versions(ctx, tool, dir, all, show_all):
@@ -1020,7 +1298,7 @@ def bump_versions(ctx, tool, dir, all, show_all):
the nf-core/modules repo.
"""
from nf_core.modules.bump_versions import ModuleVersionBumper
- from nf_core.modules.modules_utils import ModuleException
+ from nf_core.modules.modules_utils import ModuleExceptionError
try:
version_bumper = ModuleVersionBumper(
@@ -1030,7 +1308,7 @@ def bump_versions(ctx, tool, dir, all, show_all):
ctx.obj["modules_repo_no_pull"],
)
version_bumper.bump_versions(module=tool, all_modules=all, show_uptodate=show_all)
- except ModuleException as e:
+ except ModuleExceptionError as e:
log.error(e)
sys.exit(1)
except (UserWarning, LookupError) as e:
@@ -1043,9 +1321,26 @@ def bump_versions(ctx, tool, dir, all, show_all):
@click.pass_context
@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="")
-@click.option("-a", "--author", type=str, metavar="", help="Module author's GitHub username prefixed with '@'")
-@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist")
-@click.option("--migrate-pytest", is_flag=True, default=False, help="Migrate a module with pytest tests to nf-test")
+@click.option(
+ "-a",
+ "--author",
+ type=str,
+ metavar="",
+ help="Module author's GitHub username prefixed with '@'",
+)
+@click.option(
+ "-f",
+ "--force",
+ is_flag=True,
+ default=False,
+ help="Overwrite any files if they already exist",
+)
+@click.option(
+ "--migrate-pytest",
+ is_flag=True,
+ default=False,
+ help="Migrate a module with pytest tests to nf-test",
+)
def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest):
"""
Create a new subworkflow from the nf-core template.
@@ -1074,10 +1369,28 @@ def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest):
@subworkflows.command("test")
@click.pass_context
@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
-@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="")
-@click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting")
+@click.option(
+ "-d",
+ "--dir",
+ type=click.Path(exists=True),
+ default=".",
+ metavar="",
+)
+@click.option(
+ "-p",
+ "--no-prompts",
+ is_flag=True,
+ default=False,
+ help="Use defaults without prompting",
+)
@click.option("-u", "--update", is_flag=True, default=False, help="Update existing snapshots")
-@click.option("-o", "--once", is_flag=True, default=False, help="Run tests only once. Don't check snapshot stability")
+@click.option(
+ "-o",
+ "--once",
+ is_flag=True,
+ default=False,
+ help="Run tests only once. Don't check snapshot stability",
+)
def test_subworkflow(ctx, subworkflow, dir, no_prompts, update, once):
"""
Run nf-test for a subworkflow.
@@ -1176,7 +1489,13 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi
@subworkflows.command("lint")
@click.pass_context
@click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
-@click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="")
+@click.option(
+ "-d",
+ "--dir",
+ type=click.Path(exists=True),
+ default=".",
+ metavar="",
+)
@click.option(
"-r",
"--registry",
@@ -1185,7 +1504,14 @@ def subworkflows_list_local(ctx, keywords, json, dir): # pylint: disable=redefi
default=None,
help="Registry to use for containers. If not specified it will use docker.registry value in the nextflow.config file",
)
-@click.option("-k", "--key", type=str, metavar="", multiple=True, help="Run only these lint tests")
+@click.option(
+ "-k",
+ "--key",
+ type=str,
+ metavar="",
+ multiple=True,
+ help="Run only these lint tests",
+)
@click.option("-a", "--all", is_flag=True, help="Run on all subworkflows")
@click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures")
@click.option("--local", is_flag=True, help="Run additional lint tests for local subworkflows")
@@ -1207,7 +1533,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo
Test subworkflows within a pipeline or a clone of the
nf-core/modules repository.
"""
- from nf_core.components.lint import LintException
+ from nf_core.components.lint import LintExceptionError
from nf_core.subworkflows import SubworkflowLint
try:
@@ -1232,7 +1558,7 @@ def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, lo
)
if len(subworkflow_lint.failed) > 0:
sys.exit(1)
- except LintException as e:
+ except LintExceptionError as e:
log.error(e)
sys.exit(1)
except (UserWarning, LookupError) as e:
@@ -1290,11 +1616,27 @@ def subworkflows_info(ctx, tool, dir):
default=".",
help=r"Pipeline directory. [dim]\[default: current working directory][/]",
)
-@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the subworkflow")
@click.option(
- "-f", "--force", is_flag=True, default=False, help="Force reinstallation of subworkflow if it already exists"
+ "-p",
+ "--prompt",
+ is_flag=True,
+ default=False,
+ help="Prompt for the version of the subworkflow",
+)
+@click.option(
+ "-f",
+ "--force",
+ is_flag=True,
+ default=False,
+ help="Force reinstallation of subworkflow if it already exists",
+)
+@click.option(
+ "-s",
+ "--sha",
+ type=str,
+ metavar="",
+ help="Install subworkflow at commit SHA",
)
-@click.option("-s", "--sha", type=str, metavar="", help="Install subworkflow at commit SHA")
def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha):
"""
Install DSL2 subworkflow within a pipeline.
@@ -1363,10 +1705,27 @@ def subworkflows_remove(ctx, dir, subworkflow):
help=r"Pipeline directory. [dim]\[default: current working directory][/]",
)
@click.option("-f", "--force", is_flag=True, default=False, help="Force update of subworkflow")
-@click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the subworkflow")
-@click.option("-s", "--sha", type=str, metavar="", help="Install subworkflow at commit SHA")
@click.option(
- "-a", "--all", "install_all", is_flag=True, default=False, help="Update all subworkflow installed in pipeline"
+ "-p",
+ "--prompt",
+ is_flag=True,
+ default=False,
+ help="Prompt for the version of the subworkflow",
+)
+@click.option(
+ "-s",
+ "--sha",
+ type=str,
+ metavar="",
+ help="Install subworkflow at commit SHA",
+)
+@click.option(
+ "-a",
+ "--all",
+ "install_all",
+ is_flag=True,
+ default=False,
+ help="Update all subworkflow installed in pipeline",
)
@click.option(
"-x/-y",
@@ -1390,7 +1749,18 @@ def subworkflows_remove(ctx, dir, subworkflow):
default=False,
help="Automatically update all linked modules and subworkflows without asking for confirmation",
)
-def subworkflows_update(ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save_diff, update_deps):
+def subworkflows_update(
+ ctx,
+ subworkflow,
+ dir,
+ force,
+ prompt,
+ sha,
+ install_all,
+ preview,
+ save_diff,
+ update_deps,
+):
"""
Update DSL2 subworkflow within a pipeline.
@@ -1472,12 +1842,20 @@ def validate(pipeline, params):
default=".",
help=r"Pipeline directory. [dim]\[default: current working directory][/]",
)
-@click.option("--no-prompts", is_flag=True, help="Do not confirm changes, just update parameters and exit")
-@click.option("--web-only", is_flag=True, help="Skip building using Nextflow config, just launch the web tool")
+@click.option(
+ "--no-prompts",
+ is_flag=True,
+ help="Do not confirm changes, just update parameters and exit",
+)
+@click.option(
+ "--web-only",
+ is_flag=True,
+ help="Skip building using Nextflow config, just launch the web tool",
+)
@click.option(
"--url",
type=str,
- default="https://nf-co.re/pipeline_schema_builder",
+ default="https://oldsite.nf-co.re/pipeline_schema_builder",
help="Customise the builder URL (for development work)",
)
def build(dir, no_prompts, web_only, url):
@@ -1506,7 +1884,10 @@ def build(dir, no_prompts, web_only, url):
# nf-core schema lint
@schema.command("lint")
@click.argument(
- "schema_path", type=click.Path(exists=True), default="nextflow_schema.json", metavar=""
+ "schema_path",
+ type=click.Path(exists=True),
+ default="nextflow_schema.json",
+ metavar="",
)
def schema_lint(schema_path):
"""
@@ -1543,9 +1924,19 @@ def schema_lint(schema_path):
required=False,
metavar="",
)
-@click.option("-o", "--output", type=str, metavar="", help="Output filename. Defaults to standard out.")
@click.option(
- "-x", "--format", type=click.Choice(["markdown", "html"]), default="markdown", help="Format to output docs in."
+ "-o",
+ "--output",
+ type=str,
+ metavar="",
+ help="Output filename. Defaults to standard out.",
+)
+@click.option(
+ "-x",
+ "--format",
+ type=click.Choice(["markdown", "html"]),
+ default="markdown",
+ help="Format to output docs in.",
)
@click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files")
@click.option(
@@ -1584,7 +1975,11 @@ def docs(schema_path, output, format, force, columns):
help=r"Pipeline directory. [dim]\[default: current working directory][/]",
)
@click.option(
- "-n", "--nextflow", is_flag=True, default=False, help="Bump required nextflow version instead of pipeline version"
+ "-n",
+ "--nextflow",
+ is_flag=True,
+ default=False,
+ help="Bump required nextflow version instead of pipeline version",
)
def bump_version(new_version, dir, nextflow):
"""
@@ -1629,8 +2024,19 @@ def bump_version(new_version, dir, nextflow):
default=".",
help=r"Pipeline directory. [dim]\[default: current working directory][/]",
)
-@click.option("-b", "--from-branch", type=str, help="The git branch to use to fetch workflow variables.")
-@click.option("-p", "--pull-request", is_flag=True, default=False, help="Make a GitHub pull-request with the changes.")
+@click.option(
+ "-b",
+ "--from-branch",
+ type=str,
+ help="The git branch to use to fetch workflow variables.",
+)
+@click.option(
+ "-p",
+ "--pull-request",
+ is_flag=True,
+ default=False,
+ help="Make a GitHub pull-request with the changes.",
+)
@click.option("-g", "--github-repository", type=str, help="GitHub PR: target repository.")
@click.option("-u", "--username", type=str, help="GitHub PR: auth username.")
@click.option("-t", "--template-yaml", help="Pass a YAML file to customize the template")
@@ -1647,7 +2053,7 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y
the pipeline. It is run automatically for all pipelines whenever a
new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made.
"""
- from nf_core.sync import PipelineSync, PullRequestException, SyncException
+ from nf_core.sync import PipelineSync, PullRequestExceptionError, SyncExceptionError
from nf_core.utils import is_pipeline_directory
# Check if pipeline directory contains necessary files
@@ -1657,11 +2063,42 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y
sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml)
try:
sync_obj.sync()
- except (SyncException, PullRequestException) as e:
+ except (SyncExceptionError, PullRequestExceptionError) as e:
log.error(e)
sys.exit(1)
+# nf-core rocrate
+@nf_core_cli.command("rocrate")
+@click.argument(
+ "pipeline_dir",
+ type=click.Path(exists=True),
+ default=Path.cwd(),
+ required=True,
+ metavar="",
+)
+@click.option(
+ "-j",
+ "--json",
+ default="ro-crate-metadata.json",
+ type=str,
+ help="Path to save RO Crate metadata json",
+)
+@click.option("-z", "--zip", type=str, help="Path to save RO Crate zip")
+def rocrate(pipeline_dir, json, zip):
+ """
+ Make a Research Object Crate
+ """
+ import nf_core.ro_crate
+
+ if json is None and zip is None:
+ log.error("Either --json or --zip must be specified")
+ sys.exit(1)
+ pipeline_dir = Path(pipeline_dir)
+ rocrate_obj = nf_core.ro_crate.RoCrate(pipeline_dir)
+ rocrate_obj.create_ro_crate(pipeline_dir, metadata_fn=json, zip_fn=zip)
+
+
# Main script is being run - launch the CLI
if __name__ == "__main__":
run_nf_core()
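Most of the `__main__.py` hunks above are mechanical reformatting: like Black, ruff format explodes a call onto one argument per line once it exceeds the configured line length (or carries a magic trailing comma). A minimal sketch of the resulting style, using a hypothetical `greet` command rather than anything from nf-core:

```python
import click


@click.command()
@click.option(
    "-n",
    "--name",
    type=str,
    default="world",
    help="Name to greet",
)
def greet(name):
    """Print a greeting."""
    click.echo(f"Hello, {name}!")


if __name__ == "__main__":
    greet()
```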
diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py
index 40c8f8984f..c5e8931fbc 100644
--- a/nf_core/bump_version.py
+++ b/nf_core/bump_version.py
@@ -68,7 +68,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None:
[
(
f"/releases/tag/{current_version}",
- f"/tree/dev",
+ "/tree/dev",
)
],
)
@@ -78,7 +78,7 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None:
pipeline_obj,
[
(
- f"/tree/dev",
+ "/tree/dev",
f"/releases/tag/{multiqc_new_version}",
)
],
@@ -187,7 +187,7 @@ def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patt
fn = pipeline_obj._fp(filename)
content = ""
try:
- with open(fn, "r") as fh:
+ with open(fn) as fh:
content = fh.read()
except FileNotFoundError:
log.warning(f"File not found: '{fn}'")
diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py
index 44924a2704..8332429835 100644
--- a/nf_core/components/components_command.py
+++ b/nf_core/components/components_command.py
@@ -227,7 +227,7 @@ def check_patch_paths(self, patch_path: Path, module_name: str) -> None:
if patch_path.exists():
log.info(f"Modules {module_name} contains a patch file.")
rewrite = False
- with open(patch_path, "r") as fh:
+ with open(patch_path) as fh:
lines = fh.readlines()
for index, line in enumerate(lines):
# Check if there are old paths in the patch file and replace
@@ -264,7 +264,7 @@ def check_if_in_include_stmts(self, component_path: str) -> Dict[str, List[Dict[
if self.repo_type == "pipeline":
workflow_files = Path(self.dir, "workflows").glob("*.nf")
for workflow_file in workflow_files:
- with open(workflow_file, "r") as fh:
+ with open(workflow_file) as fh:
# Check if component path is in the file using mmap
with mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ) as s:
if s.find(component_path.encode()) != -1:
diff --git a/nf_core/components/components_utils.py b/nf_core/components/components_utils.py
index eec533ce60..01650a643d 100644
--- a/nf_core/components/components_utils.py
+++ b/nf_core/components/components_utils.py
@@ -53,7 +53,7 @@ def get_repo_info(directory: str, use_prompt: Optional[bool] = True) -> Tuple[st
raise UserWarning("Repository type could not be established")
# Check if it's a valid answer
- if not repo_type in ["pipeline", "modules"]:
+ if repo_type not in ["pipeline", "modules"]:
raise UserWarning(f"Invalid repository type: '{repo_type}'")
# Check for org if modules repo
@@ -138,7 +138,7 @@ def get_components_to_install(subworkflow_dir: str) -> Tuple[List[str], List[str
"""
modules = []
subworkflows = []
- with open(Path(subworkflow_dir, "main.nf"), "r") as fh:
+ with open(Path(subworkflow_dir, "main.nf")) as fh:
for line in fh:
regex = re.compile(
r"include(?: *{ *)([a-zA-Z\_0-9]*)(?: *as *)?(?:[a-zA-Z\_0-9]*)?(?: *})(?: *from *)(?:'|\")(.*)(?:'|\")"
diff --git a/nf_core/components/create.py b/nf_core/components/create.py
index 568ca22af5..32f6d1a433 100644
--- a/nf_core/components/create.py
+++ b/nf_core/components/create.py
@@ -2,7 +2,6 @@
The ComponentCreate class handles generating of module and subworkflow templates
"""
-from __future__ import print_function
import glob
import json
@@ -440,12 +439,13 @@ def _copy_old_files(self, component_old_path):
pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir)
nextflow_config = pytest_dir / "nextflow.config"
if nextflow_config.is_file():
- with open(nextflow_config, "r") as fh:
+ with open(nextflow_config) as fh:
config_lines = ""
for line in fh:
- if "publishDir" not in line:
+ if "publishDir" not in line and line.strip() != "":
config_lines += line
- if len(config_lines) > 0:
+ # if the nextflow.config file only contained publishDir, config_lines will be 11 characters long (`process {\n}`)
+ if len(config_lines) > 11:
log.debug("Copying nextflow.config file from pytest tests")
with open(
Path(self.directory, self.component_type, self.org, self.component_dir, "tests", "nextflow.config"),
@@ -460,7 +460,7 @@ def _print_and_delete_pytest_files(self):
"[violet]Do you want to delete the pytest files?[/]\nPytest file 'main.nf' will be printed to standard output to allow migrating the tests manually to 'main.nf.test'.",
default=False,
):
- with open(pytest_dir / "main.nf", "r") as fh:
+ with open(pytest_dir / "main.nf") as fh:
log.info(fh.read())
shutil.rmtree(pytest_dir)
log.info(
@@ -475,7 +475,7 @@ def _print_and_delete_pytest_files(self):
)
# Delete tags from pytest_modules.yml
modules_yml = Path(self.directory, "tests", "config", "pytest_modules.yml")
- with open(modules_yml, "r") as fh:
+ with open(modules_yml) as fh:
yml_file = yaml.safe_load(fh)
yml_key = str(self.component_dir) if self.component_type == "modules" else f"subworkflows/{self.component_dir}"
if yml_key in yml_file:
diff --git a/nf_core/components/info.py b/nf_core/components/info.py
index e4d8038b87..54fc0004dc 100644
--- a/nf_core/components/info.py
+++ b/nf_core/components/info.py
@@ -184,7 +184,7 @@ def get_local_yaml(self):
meta_fn = Path(comp_dir, "meta.yml")
if meta_fn.exists():
log.debug(f"Found local file: {meta_fn}")
- with open(meta_fn, "r") as fh:
+ with open(meta_fn) as fh:
self.local_path = comp_dir
return yaml.safe_load(fh)
@@ -196,7 +196,7 @@ def get_local_yaml(self):
meta_fn = Path(comp_dir, "meta.yml")
if meta_fn.exists():
log.debug(f"Found local file: {meta_fn}")
- with open(meta_fn, "r") as fh:
+ with open(meta_fn) as fh:
self.local_path = comp_dir
return yaml.safe_load(fh)
log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally")
diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py
index efffc28e85..3c2fb9dde3 100644
--- a/nf_core/components/lint/__init__.py
+++ b/nf_core/components/lint/__init__.py
@@ -3,7 +3,6 @@
in nf-core pipelines
"""
-from __future__ import print_function
import logging
import operator
@@ -27,7 +26,7 @@
log = logging.getLogger(__name__)
-class LintException(Exception):
+class LintExceptionError(Exception):
"""Exception raised when there was an error with module or subworkflow linting"""
pass
@@ -216,7 +215,7 @@ def _print_results(self, show_passed=False, sort_by="test"):
try:
for lint_result in tests:
max_name_len = max(len(lint_result.component_name), max_name_len)
- except:
+ except Exception:
pass
# Helper function to format test links nicely
diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py
index 874fa570bc..2f73afe9d3 100644
--- a/nf_core/components/nfcore_component.py
+++ b/nf_core/components/nfcore_component.py
@@ -81,7 +81,7 @@ def __init__(
def _get_main_nf_tags(self, test_main_nf: Union[Path, str]):
"""Collect all tags from the main.nf.test file."""
tags = []
- with open(test_main_nf, "r") as fh:
+ with open(test_main_nf) as fh:
for line in fh:
if line.strip().startswith("tag"):
tags.append(line.strip().split()[1].strip('"'))
@@ -90,7 +90,7 @@ def _get_main_nf_tags(self, test_main_nf: Union[Path, str]):
def _get_included_components(self, main_nf: Union[Path, str]):
"""Collect all included components from the main.nf file."""
included_components = []
- with open(main_nf, "r") as fh:
+ with open(main_nf) as fh:
for line in fh:
if line.strip().startswith("include"):
# get tool/subtool or subworkflow name from include statement, can be in the form
@@ -107,7 +107,7 @@ def _get_included_components(self, main_nf: Union[Path, str]):
def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, str]):
"""Collect all included components from the main.nf file."""
included_components = []
- with open(main_nf_test, "r") as fh:
+ with open(main_nf_test) as fh:
for line in fh:
if line.strip().startswith("script"):
# get tool/subtool or subworkflow name from script statement, can be:
@@ -151,7 +151,7 @@ def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, st
def get_inputs_from_main_nf(self):
"""Collect all inputs from the main.nf file."""
inputs = []
- with open(self.main_nf, "r") as f:
+ with open(self.main_nf) as f:
data = f.read()
# get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo
# regex matches:
@@ -168,17 +168,19 @@ def get_inputs_from_main_nf(self):
input_data = data.split("input:")[1].split("output:")[0]
regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))"
matches = re.finditer(regex, input_data, re.MULTILINE)
- for matchNum, match in enumerate(matches, start=1):
+ for _, match in enumerate(matches, start=1):
if match.group(3):
- inputs.append(match.group(3))
+ input_val = match.group(3).split(",")[0] # handle `files, stageAs: "inputs/*"` cases
+ inputs.append(input_val)
elif match.group(4):
- inputs.append(match.group(4))
+ input_val = match.group(4).split(",")[0] # handle `files, stageAs: "inputs/*"` cases
+ inputs.append(input_val)
log.info(f"Found {len(inputs)} inputs in {self.main_nf}")
self.inputs = inputs
def get_outputs_from_main_nf(self):
outputs = []
- with open(self.main_nf, "r") as f:
+ with open(self.main_nf) as f:
data = f.read()
# get output values from main.nf after "output:". the names are always after "emit:"
if "output:" not in data:
@@ -187,7 +189,7 @@ def get_outputs_from_main_nf(self):
output_data = data.split("output:")[1].split("when:")[0]
regex = r"emit:\s*([^)\s,]+)"
matches = re.finditer(regex, output_data, re.MULTILINE)
- for matchNum, match in enumerate(matches, start=1):
+ for _, match in enumerate(matches, start=1):
outputs.append(match.group(1))
log.info(f"Found {len(outputs)} outputs in {self.main_nf}")
self.outputs = outputs
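The `split(",")[0]` additions above exist because an input can be declared with staging options, e.g. `path(files, stageAs: "inputs/*")`, and only the name before the comma should be recorded. A sketch of the parsing on a hypothetical `input:` block:

```python
import re

input_data = """
    tuple val(meta), path(files, stageAs: "inputs/*")
    path fasta
"""

# Same pattern as in get_inputs_from_main_nf above
regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))"
inputs = []
for match in re.finditer(regex, input_data, re.MULTILINE):
    if match.group(3):
        inputs.append(match.group(3).split(",")[0])  # strip staging options
    elif match.group(4):
        inputs.append(match.group(4).split(",")[0])
print(inputs)  # ['meta', 'files', 'fasta']
```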
diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py
index 28f2f886b1..55d5747451 100644
--- a/nf_core/components/patch.py
+++ b/nf_core/components/patch.py
@@ -35,7 +35,7 @@ def _parameter_checks(self, component):
if component is not None and component not in component_names:
component_dir = [dir for dir, m in components if m == component][0]
raise UserWarning(
- f"{self.component_type[:-1].title()} '{Path(self.component_type, component_dir, module)}' does not exist in the pipeline"
+ f"{self.component_type[:-1].title()} '{Path(self.component_type, component_dir, component)}' does not exist in the pipeline"
)
def patch(self, component=None):
@@ -220,5 +220,5 @@ def remove(self, component):
):
log.error(
f"Module files do not appear to match the remote for the commit sha in the 'module.json': {component_version}\n"
- f"Recommend reinstalling with 'nf-core modules install --force --sha {component_version} {module}' "
+ f"Recommend reinstalling with 'nf-core modules install --force --sha {component_version} {component}' "
)
diff --git a/nf_core/create.py b/nf_core/create.py
index 56d0912a07..0d5b7e4d7b 100644
--- a/nf_core/create.py
+++ b/nf_core/create.py
@@ -108,7 +108,7 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa
# Obtain template customization info from template yaml file or `.nf-core.yml` config file
try:
if template_yaml_path is not None:
- with open(template_yaml_path, "r") as f:
+ with open(template_yaml_path) as f:
template_yaml = yaml.safe_load(f)
elif "template" in config_yml:
template_yaml = config_yml["template"]
@@ -395,7 +395,7 @@ def remove_nf_core_in_bug_report_template(self):
"""
bug_report_path = self.outdir / ".github" / "ISSUE_TEMPLATE" / "bug_report.yml"
- with open(bug_report_path, "r") as fh:
+ with open(bug_report_path) as fh:
contents = yaml.load(fh, Loader=yaml.FullLoader)
# Remove the first item in the body, which is the information about the docs
diff --git a/nf_core/download.py b/nf_core/download.py
index 08bef935ba..4c0bc97f42 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -1,6 +1,5 @@
"""Downloads a nf-core pipeline to the local file system."""
-from __future__ import print_function
import concurrent.futures
import io
@@ -21,7 +20,7 @@
import rich
import rich.progress
from git.exc import GitCommandError, InvalidGitRepositoryError
-from pkg_resources import parse_version as VersionParser
+from pkg_resources import parse_version as version_parser
import nf_core
import nf_core.list
@@ -551,7 +550,7 @@ def read_remote_containers(self):
self.containers_remote = sorted(list(set(self.containers_remote)))
except (FileNotFoundError, LookupError) as e:
log.error(f"[red]Issue with reading the specified remote $NXF_SINGULARITY_CACHE index:[/]\n{e}\n")
- if stderr.is_interactive and rich.prompt.Confirm.ask(f"[blue]Specify a new index file and try again?"):
+ if stderr.is_interactive and rich.prompt.Confirm.ask("[blue]Specify a new index file and try again?"):
self.container_cache_index = None # reset chosen path to index file.
self.prompt_singularity_cachedir_remote()
else:
@@ -640,7 +639,7 @@ def wf_use_local_configs(self, revision_dirname):
log.debug(f"Editing 'params.custom_config_base' in '{nfconfig_fn}'")
# Load the nextflow.config file into memory
- with open(nfconfig_fn, "r") as nfconfig_fh:
+ with open(nfconfig_fn) as nfconfig_fh:
nfconfig = nfconfig_fh.read()
# Replace the target string
@@ -700,7 +699,7 @@ def find_container_images(self, workflow_directory):
if bool(config_findings_dsl2):
# finding will always be a tuple of length 2, first the quote used and second the quoted value.
for finding in config_findings_dsl2:
- config_findings.append((finding + (self.nf_config, "Nextflow configs")))
+ config_findings.append(finding + (self.nf_config, "Nextflow configs"))
else: # no regex match, likely just plain string
"""
Append string also as finding-like tuple for consistency
@@ -719,7 +718,7 @@ def find_container_images(self, workflow_directory):
for file in files:
if file.endswith(".nf"):
file_path = os.path.join(subdir, file)
- with open(file_path, "r") as fh:
+ with open(file_path) as fh:
# Look for any lines with container "xxx" or container 'xxx'
search_space = fh.read()
"""
@@ -744,7 +743,7 @@ def find_container_images(self, workflow_directory):
for finding in local_module_findings:
# append finding since we want to collect them from all modules
# also append search_space because we need to start over later if nothing was found.
- module_findings.append((finding + (search_space, file_path)))
+ module_findings.append(finding + (search_space, file_path))
# Not sure if there will ever be multiple container definitions per module, but beware DSL3.
# Like above run on shallow copy, because length may change at runtime.
@@ -853,7 +852,7 @@ def rectify_raw_container_matches(self, raw_findings):
['https://depot.galaxyproject.org/singularity/scanpy:1.7.2--pyhdfd78af_0', 'biocontainers/scanpy:1.7.2--pyhdfd78af_0']
"""
container_value_defs = [
- capture for _, capture in container_value_defs[:] if not capture in ["singularity", "apptainer"]
+ capture for _, capture in container_value_defs[:] if capture not in ["singularity", "apptainer"]
]
"""
@@ -1066,10 +1065,10 @@ def get_singularity_images(self, current_revision=""):
self.singularity_pull_image(*container, library, progress)
# Pulling the image was successful, no ContainerError was raised, break the library loop
break
- except ContainerError.ImageExists as e:
+ except ContainerError.ImageExistsError:
# Pulling not required
break
- except ContainerError.RegistryNotFound as e:
+ except ContainerError.RegistryNotFoundError as e:
self.container_library.remove(library)
# The only library was removed
if not self.container_library:
@@ -1079,13 +1078,13 @@ def get_singularity_images(self, current_revision=""):
else:
# Other libraries can be used
continue
- except ContainerError.ImageNotFound as e:
+ except ContainerError.ImageNotFoundError as e:
# Try other registries
if e.error_log.absolute_URI:
break # there no point in trying other registries if absolute URI was specified.
else:
continue
- except ContainerError.InvalidTag as e:
+ except ContainerError.InvalidTagError:
# Try other registries
continue
except ContainerError.OtherError as e:
@@ -1524,7 +1523,7 @@ def tidy_tags_and_branches(self):
else:
# desired revisions may contain arbitrary branch names that do not correspond to valid semantic versioning patterns.
valid_versions = [
- VersionParser(v)
+ version_parser(v)
for v in desired_revisions
if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v)
]
@@ -1583,7 +1582,7 @@ def __init__(self, container, registry, address, absolute_URI, out_path, singula
for line in error_msg:
if re.search(r"dial\stcp.*no\ssuch\shost", line):
- self.error_type = self.RegistryNotFound(self)
+ self.error_type = self.RegistryNotFoundError(self)
break
elif (
re.search(r"requested\saccess\sto\sthe\sresource\sis\sdenied", line)
@@ -1595,13 +1594,13 @@ def __init__(self, container, registry, address, absolute_URI, out_path, singula
# unauthorized: authentication required
# Quay.io: StatusCode: 404, \n']
# ghcr.io: Requesting bearer token: invalid status code from registry 400 (Bad Request)
- self.error_type = self.ImageNotFound(self)
+ self.error_type = self.ImageNotFoundError(self)
break
elif re.search(r"manifest\sunknown", line):
- self.error_type = self.InvalidTag(self)
+ self.error_type = self.InvalidTagError(self)
break
elif re.search(r"Image\sfile\salready\sexists", line):
- self.error_type = self.ImageExists(self)
+ self.error_type = self.ImageExistsError(self)
break
else:
continue
@@ -1615,7 +1614,7 @@ def __init__(self, container, registry, address, absolute_URI, out_path, singula
raise self.error_type
- class RegistryNotFound(ConnectionRefusedError):
+ class RegistryNotFoundError(ConnectionRefusedError):
"""The specified registry does not resolve to a valid IP address"""
def __init__(self, error_log):
@@ -1628,7 +1627,7 @@ def __init__(self, error_log):
)
super().__init__(self.message, self.helpmessage, self.error_log)
- class ImageNotFound(FileNotFoundError):
+ class ImageNotFoundError(FileNotFoundError):
"""The image can not be found in the registry"""
def __init__(self, error_log):
@@ -1644,7 +1643,7 @@ def __init__(self, error_log):
super().__init__(self.message)
- class InvalidTag(AttributeError):
+ class InvalidTagError(AttributeError):
"""Image and registry are valid, but the (version) tag is not"""
def __init__(self, error_log):
@@ -1653,7 +1652,7 @@ def __init__(self, error_log):
self.helpmessage = f'Please choose a different library than {self.error_log.registry}\nor try to locate the "{self.error_log.address.split(":")[-1]}" version of "{self.error_log.container}" manually.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.\n'
super().__init__(self.message)
- class ImageExists(FileExistsError):
+ class ImageExistsError(FileExistsError):
"""Image already exists in cache/output directory."""
def __init__(self, error_log):
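The `*Error` renames in `download.py` (and `LintExceptionError`, `ModuleExceptionError`, `SyncExceptionError` elsewhere in this diff) follow pep8-naming rule N818, which ruff enforces: exception class names should end in `Error`. A minimal sketch of the pattern:

```python
class ImageNotFoundError(FileNotFoundError):  # was: ImageNotFound
    """The image can not be found in the registry."""


try:
    raise ImageNotFoundError("example/image:1.0 not found")  # hypothetical image
except ImageNotFoundError as e:
    print(f"handled: {e}")
```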
diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile
index e721f210d0..ad4bed5052 100644
--- a/nf_core/gitpod/gitpod.Dockerfile
+++ b/nf_core/gitpod/gitpod.Dockerfile
@@ -47,19 +47,23 @@ RUN conda config --add channels defaults && \
conda config --add channels conda-forge && \
conda config --set channel_priority strict && \
conda install --quiet --yes --name base \
- mamba \
- nextflow \
- nf-core \
- nf-test \
- black \
- prettier \
- pre-commit \
- openjdk \
- pytest-workflow && \
+ mamba \
+ nextflow \
+ nf-core \
+ nf-test \
+ prettier \
+ pre-commit \
+ ruff \
+ openjdk \
+ pytest-workflow && \
conda clean --all --force-pkgs-dirs --yes
# Update Nextflow
RUN nextflow self-update
# Install nf-core
-RUN python -m pip install .
+RUN python -m pip install . --no-cache-dir
+
+# Set up pdiff for nf-test diffs
+RUN export NFT_DIFF="pdiff" && \
+ export NFT_DIFF_ARGS="--line-numbers --expand-tabs=2"
diff --git a/nf_core/launch.py b/nf_core/launch.py
index 363506c448..25bb4c150c 100644
--- a/nf_core/launch.py
+++ b/nf_core/launch.py
@@ -1,6 +1,5 @@
""" Launch a pipeline, interactively collecting params """
-from __future__ import print_function
import copy
import json
@@ -428,7 +427,7 @@ def prompt_param(self, param_id, param_obj, is_required, answers):
answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style)
# If required and got an empty response, ask again
- while type(answer[param_id]) is str and answer[param_id].strip() == "" and is_required:
+ while isinstance(answer[param_id], str) and answer[param_id].strip() == "" and is_required:
log.error(f"'--{param_id}' is required")
answer = questionary.unsafe_prompt([question], style=nf_core.utils.nfcore_question_style)
@@ -546,14 +545,14 @@ def single_param_to_questionary(self, param_id, param_obj, answers=None, print_h
# Start with the default from the param object
if "default" in param_obj:
# Boolean default is cast back to a string later - this just normalises all inputs
- if param_obj["type"] == "boolean" and type(param_obj["default"]) is str:
+ if param_obj["type"] == "boolean" and isinstance(param_obj["default"], str):
question["default"] = param_obj["default"].lower() == "true"
else:
question["default"] = param_obj["default"]
# Overwrite default with parsed schema, includes --params-in etc
if self.schema_obj is not None and param_id in self.schema_obj.input_params:
- if param_obj["type"] == "boolean" and type(self.schema_obj.input_params[param_id]) is str:
+ if param_obj["type"] == "boolean" and isinstance(self.schema_obj.input_params[param_id], str):
question["default"] = "true" == self.schema_obj.input_params[param_id].lower()
else:
question["default"] = self.schema_obj.input_params[param_id]
diff --git a/nf_core/licences.py b/nf_core/licences.py
index d686a56178..a8a35334dd 100644
--- a/nf_core/licences.py
+++ b/nf_core/licences.py
@@ -1,6 +1,5 @@
"""Lists software licences for a given workflow."""
-from __future__ import print_function
import json
import logging
diff --git a/nf_core/lint/actions_awsfulltest.py b/nf_core/lint/actions_awsfulltest.py
index e8e1c951b1..66aa3f99bf 100644
--- a/nf_core/lint/actions_awsfulltest.py
+++ b/nf_core/lint/actions_awsfulltest.py
@@ -32,7 +32,7 @@ def actions_awsfulltest(self):
fn = os.path.join(self.wf_path, ".github", "workflows", "awsfulltest.yml")
if os.path.isfile(fn):
try:
- with open(fn, "r") as fh:
+ with open(fn) as fh:
wf = yaml.safe_load(fh)
except Exception as e:
return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
diff --git a/nf_core/lint/actions_awstest.py b/nf_core/lint/actions_awstest.py
index ccdf0abf6a..7c55998944 100644
--- a/nf_core/lint/actions_awstest.py
+++ b/nf_core/lint/actions_awstest.py
@@ -27,7 +27,7 @@ def actions_awstest(self):
return {"ignored": [f"'awstest.yml' workflow not found: `{fn}`"]}
try:
- with open(fn, "r") as fh:
+ with open(fn) as fh:
wf = yaml.safe_load(fh)
except Exception as e:
return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
diff --git a/nf_core/lint/actions_ci.py b/nf_core/lint/actions_ci.py
index e669eceb8c..a3e7d54b66 100644
--- a/nf_core/lint/actions_ci.py
+++ b/nf_core/lint/actions_ci.py
@@ -1,5 +1,4 @@
import os
-import re
import yaml
@@ -48,7 +47,7 @@ def actions_ci(self):
return {"ignored": ["'.github/workflows/ci.yml' not found"]}
try:
- with open(fn, "r") as fh:
+ with open(fn) as fh:
ciwf = yaml.safe_load(fh)
except Exception as e:
return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
@@ -62,7 +61,7 @@ def actions_ci(self):
if not (
pr_subtree is None
or ("branches" in pr_subtree and "dev" in pr_subtree["branches"])
- or ("ignore_branches" in pr_subtree and not "dev" in pr_subtree["ignore_branches"])
+ or ("ignore_branches" in pr_subtree and "dev" not in pr_subtree["ignore_branches"])
):
raise AssertionError()
if "published" not in ciwf[True]["release"]["types"]:
diff --git a/nf_core/lint/actions_schema_validation.py b/nf_core/lint/actions_schema_validation.py
index 9d49b84c6b..fa4471d98c 100644
--- a/nf_core/lint/actions_schema_validation.py
+++ b/nf_core/lint/actions_schema_validation.py
@@ -36,7 +36,7 @@ def actions_schema_validation(self):
# load workflow
try:
- with open(wf_path, "r") as fh:
+ with open(wf_path) as fh:
wf_json = yaml.safe_load(fh)
except Exception as e:
failed.append(f"Could not parse yaml file: {wf}, {e}")
diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py
index 2b64d62638..82b286fb44 100644
--- a/nf_core/lint/files_unchanged.py
+++ b/nf_core/lint/files_unchanged.py
@@ -187,16 +187,16 @@ def _tf(file_path):
else:
for f in files:
try:
- with open(_pf(f), "r") as fh:
+ with open(_pf(f)) as fh:
pipeline_file = fh.read()
- with open(_tf(f), "r") as fh:
+ with open(_tf(f)) as fh:
template_file = fh.read()
if template_file in pipeline_file:
passed.append(f"`{f}` matches the template")
else:
if "files_unchanged" in self.fix:
# Try to fix the problem by overwriting the pipeline file
- with open(_tf(f), "r") as fh:
+ with open(_tf(f)) as fh:
template_file = fh.read()
with open(_pf(f), "w") as fh:
fh.write(template_file)
diff --git a/nf_core/lint/merge_markers.py b/nf_core/lint/merge_markers.py
index f33a5095d8..8ef425234b 100644
--- a/nf_core/lint/merge_markers.py
+++ b/nf_core/lint/merge_markers.py
@@ -1,5 +1,4 @@
import fnmatch
-import io
import logging
import os
@@ -23,9 +22,9 @@ def merge_markers(self):
ignore = [".git"]
if os.path.isfile(os.path.join(self.wf_path, ".gitignore")):
- with io.open(os.path.join(self.wf_path, ".gitignore"), "rt", encoding="latin1") as fh:
- for l in fh:
- ignore.append(os.path.basename(l.strip().rstrip("/")))
+ with open(os.path.join(self.wf_path, ".gitignore"), encoding="latin1") as fh:
+ for line in fh:
+ ignore.append(os.path.basename(line.strip().rstrip("/")))
for root, dirs, files in os.walk(self.wf_path, topdown=True):
# Ignore files
for i_base in ignore:
@@ -41,12 +40,12 @@ def merge_markers(self):
if nf_core.utils.is_file_binary(os.path.join(root, fname)):
continue
try:
- with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
- for l in fh:
- if ">>>>>>>" in l:
- failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {l[:30]}")
- if "<<<<<<<" in l:
- failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, fname)}`: {l[:30]}")
+ with open(os.path.join(root, fname), encoding="latin1") as fh:
+ for line in fh:
+ if ">>>>>>>" in line:
+ failed.append(f"Merge marker '>>>>>>>' in `{os.path.join(root, fname)}`: {line[:30]}")
+ if "<<<<<<<" in line:
+ failed.append(f"Merge marker '<<<<<<<' in `{os.path.join(root, fname)}`: {line[:30]}")
except FileNotFoundError:
log.debug(f"Could not open file {os.path.join(root, fname)} in merge_markers lint test")
if len(failed) == 0:
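The `l` → `line` renames here and in several other lint tests come from ruff rule E741, which flags single-character names that are easily misread (`l`, `I`, `O`). A self-contained sketch, with `io.StringIO` standing in for the `.gitignore` file handle:

```python
import io

fh = io.StringIO(".nextflow/\nwork/\nresults/\n")  # fake .gitignore contents
for line in fh:  # was: for l in fh:
    print(line.strip().rstrip("/"))
```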
diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py
index cbbeae07a8..b2f1a89a1b 100644
--- a/nf_core/lint/multiqc_config.py
+++ b/nf_core/lint/multiqc_config.py
@@ -34,7 +34,7 @@ def multiqc_config(self) -> Dict[str, List[str]]:
return {"ignored": ["'assets/multiqc_config.yml' not found"]}
try:
- with open(fn, "r") as fh:
+ with open(fn) as fh:
mqc_yml = yaml.safe_load(fh)
except Exception as e:
return {"failed": [f"Could not parse yaml file: {fn}, {e}"]}
diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py
index 24f1e5c12f..328bc03759 100644
--- a/nf_core/lint/nextflow_config.py
+++ b/nf_core/lint/nextflow_config.py
@@ -300,7 +300,7 @@ def nextflow_config(self):
]
path = os.path.join(self.wf_path, "nextflow.config")
i = 0
- with open(path, "r") as f:
+ with open(path) as f:
for line in f:
if lines[i] in line:
i += 1
@@ -320,7 +320,7 @@ def nextflow_config(self):
)
# Check for the availability of the "test" configuration profile by parsing nextflow.config
- with open(os.path.join(self.wf_path, "nextflow.config"), "r") as f:
+ with open(os.path.join(self.wf_path, "nextflow.config")) as f:
content = f.read()
# Remove comments
diff --git a/nf_core/lint/pipeline_todos.py b/nf_core/lint/pipeline_todos.py
index 890e227fa1..ba6ec79150 100644
--- a/nf_core/lint/pipeline_todos.py
+++ b/nf_core/lint/pipeline_todos.py
@@ -1,5 +1,4 @@
import fnmatch
-import io
import logging
import os
@@ -41,9 +40,9 @@ def pipeline_todos(self, root_dir=None):
ignore = [".git"]
if os.path.isfile(os.path.join(root_dir, ".gitignore")):
- with io.open(os.path.join(root_dir, ".gitignore"), "rt", encoding="latin1") as fh:
- for l in fh:
- ignore.append(os.path.basename(l.strip().rstrip("/")))
+ with open(os.path.join(root_dir, ".gitignore"), encoding="latin1") as fh:
+ for line in fh:
+ ignore.append(os.path.basename(line.strip().rstrip("/")))
for root, dirs, files in os.walk(root_dir, topdown=True):
# Ignore files
for i_base in ignore:
@@ -52,18 +51,18 @@ def pipeline_todos(self, root_dir=None):
files[:] = [f for f in files if not fnmatch.fnmatch(os.path.join(root, f), i)]
for fname in files:
try:
- with io.open(os.path.join(root, fname), "rt", encoding="latin1") as fh:
- for l in fh:
- if "TODO nf-core" in l:
- l = (
- l.replace("<!--", "")
- .replace("-->", "")
+ with open(os.path.join(root, fname), encoding="latin1") as fh:
+ for line in fh:
+ if "TODO nf-core" in line:
+ line = (
+ line.replace("<!--", "")
+ .replace("-->", "")
.replace("# TODO nf-core: ", "")
.replace("// TODO nf-core: ", "")
.replace("TODO nf-core: ", "")
.strip()
)
- warned.append(f"TODO string in `{fname}`: _{l}_")
+ warned.append(f"TODO string in `{fname}`: _{line}_")
file_paths.append(os.path.join(root, fname))
except FileNotFoundError:
log.debug(f"Could not open file {fname} in pipeline_todos lint test")
diff --git a/nf_core/lint/readme.py b/nf_core/lint/readme.py
index 55060442b1..cade9ca3ea 100644
--- a/nf_core/lint/readme.py
+++ b/nf_core/lint/readme.py
@@ -31,7 +31,7 @@ def readme(self):
# Remove field that should be ignored according to the linting config
ignore_configs = self.lint_config.get("readme", [])
- with open(os.path.join(self.wf_path, "README.md"), "r") as fh:
+ with open(os.path.join(self.wf_path, "README.md")) as fh:
content = fh.read()
if "nextflow_badge" not in ignore_configs:
diff --git a/nf_core/lint/system_exit.py b/nf_core/lint/system_exit.py
index 56a526d97b..435a2452d0 100644
--- a/nf_core/lint/system_exit.py
+++ b/nf_core/lint/system_exit.py
@@ -25,9 +25,9 @@ def system_exit(self):
for file in to_check:
try:
with file.open() as fh:
- for i, l in enumerate(fh.readlines(), start=1):
- if "System.exit" in l and not "System.exit(0)" in l:
- warned.append(f"`System.exit` in {file.name}: _{l.strip()}_ [line {i}]")
+ for i, line in enumerate(fh.readlines(), start=1):
+ if "System.exit" in line and "System.exit(0)" not in line:
+ warned.append(f"`System.exit` in {file.name}: _{line.strip()}_ [line {i}]")
except FileNotFoundError:
log.debug(f"Could not open file {file.name} in system_exit lint test")
diff --git a/nf_core/lint/template_strings.py b/nf_core/lint/template_strings.py
index fb1f0f32e5..3467229362 100644
--- a/nf_core/lint/template_strings.py
+++ b/nf_core/lint/template_strings.py
@@ -1,4 +1,3 @@
-import io
import mimetypes
import re
@@ -30,11 +29,11 @@ def template_strings(self):
if encoding is not None or (ftype is not None and any([ftype.startswith(ft) for ft in binary_ftypes])):
continue
- with io.open(fn, "r", encoding="latin1") as fh:
+ with open(fn, encoding="latin1") as fh:
lnum = 0
- for l in fh:
+ for line in fh:
lnum += 1
- cc_matches = re.findall(r"[^$]{{[^:}]*}}", l)
+ cc_matches = re.findall(r"[^$]{{[^:}]*}}", line)
if len(cc_matches) > 0:
for cc_match in cc_matches:
failed.append(f"Found a Jinja template string in `{fn}` L{lnum}: {cc_match}")
diff --git a/nf_core/lint/version_consistency.py b/nf_core/lint/version_consistency.py
index fa5b50de01..e396ca9e7a 100644
--- a/nf_core/lint/version_consistency.py
+++ b/nf_core/lint/version_consistency.py
@@ -31,7 +31,7 @@ def version_consistency(self):
versions["manifest.version"] = self.nf_config.get("manifest.version", "").strip(" '\"")
# Get version from the docker tag
- if self.nf_config.get("process.container", "") and not ":" in self.nf_config.get("process.container", ""):
+ if self.nf_config.get("process.container", "") and ":" not in self.nf_config.get("process.container", ""):
failed.append(f"Docker slug seems not to have a version tag: {self.nf_config.get('process.container', '')}")
# Get config container tag (if set; one container per workflow)
@@ -53,8 +53,9 @@ def version_consistency(self):
# Check if they are consistent
if len(set(versions.values())) != 1:
failed.append(
- "The versioning is not consistent between container, release tag "
- "and config. Found {}".format(", ".join(["{} = {}".format(k, v) for k, v in versions.items()]))
+ "The versioning is not consistent between container, release tag " "and config. Found {}".format(
+ ", ".join([f"{k} = {v}" for k, v in versions.items()])
+ )
)
passed.append("Version tags are numeric and consistent between container, release tag and config.")
diff --git a/nf_core/list.py b/nf_core/list.py
index 94d9d8e043..d0b59319a3 100644
--- a/nf_core/list.py
+++ b/nf_core/list.py
@@ -1,6 +1,5 @@
"""Lists available nf-core pipelines and versions."""
-from __future__ import print_function
import json
import logging
@@ -205,7 +204,7 @@ def print_summary(self):
def sort_pulled_date(wf):
try:
return wf.local_wf.last_pull * -1
- except:
+ except Exception:
return 0
filtered_workflows.sort(key=sort_pulled_date)
diff --git a/nf_core/module-template/tests/main.nf.test b/nf_core/module-template/tests/main.nf.test
index 5a2e6cdc63..e1b1dadf12 100644
--- a/nf_core/module-template/tests/main.nf.test
+++ b/nf_core/module-template/tests/main.nf.test
@@ -26,12 +26,12 @@ nextflow_process {
"""
// TODO nf-core: define inputs of the process here. Example:
{% if has_meta %}
- input = [
+ input[0] = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true)
]
{%- else %}
- input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true)
+ input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true)
{%- endif %}
"""
}
@@ -58,12 +58,12 @@ nextflow_process {
"""
// TODO nf-core: define inputs of the process here. Example:
{% if has_meta %}
- input = [
+ input[0] = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true)
]
{%- else %}
- input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true)
+ input[0] = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true)
{%- endif %}
"""
}
diff --git a/nf_core/modules/__init__.py b/nf_core/modules/__init__.py
index 4b36f302bd..6be871ece8 100644
--- a/nf_core/modules/__init__.py
+++ b/nf_core/modules/__init__.py
@@ -6,7 +6,7 @@
from .list import ModuleList
from .modules_json import ModulesJson
from .modules_repo import ModulesRepo
-from .modules_utils import ModuleException
+from .modules_utils import ModuleExceptionError
from .patch import ModulePatch
from .remove import ModuleRemove
from .update import ModuleUpdate
diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py
index 25259f1a16..b9003be974 100644
--- a/nf_core/modules/bump_versions.py
+++ b/nf_core/modules/bump_versions.py
@@ -4,8 +4,6 @@
"""
-from __future__ import print_function
-
import logging
import os
import re
@@ -24,9 +22,8 @@
import nf_core.utils
from nf_core.components.components_command import ComponentCommand
from nf_core.components.nfcore_component import NFCoreComponent
-from nf_core.utils import custom_yaml_dumper
+from nf_core.utils import custom_yaml_dumper, rich_force_colors
from nf_core.utils import plural_s as _s
-from nf_core.utils import rich_force_colors
log = logging.getLogger(__name__)
@@ -74,7 +71,7 @@ def bump_versions(
# Verify that this is not a pipeline
if not self.repo_type == "modules":
- raise nf_core.modules.modules_utils.ModuleException(
+ raise nf_core.modules.modules_utils.ModuleExceptionError(
"This command only works on the nf-core/modules repository, not on pipelines!"
)
@@ -105,12 +102,14 @@ def bump_versions(
if module:
self.show_up_to_date = True
if all_modules:
- raise nf_core.modules.modules_utils.ModuleException(
+ raise nf_core.modules.modules_utils.ModuleExceptionError(
"You cannot specify a tool and request all tools to be bumped."
)
nfcore_modules = [m for m in nfcore_modules if m.component_name == module]
if len(nfcore_modules) == 0:
- raise nf_core.modules.modules_utils.ModuleException(f"Could not find the specified module: '{module}'")
+ raise nf_core.modules.modules_utils.ModuleExceptionError(
+ f"Could not find the specified module: '{module}'"
+ )
progress_bar = Progress(
"[bold blue]{task.description}",
@@ -146,10 +145,10 @@ def bump_module_version(self, module: NFCoreComponent) -> bool:
except FileNotFoundError:
# try it in the main.nf instead
try:
- with open(module.main_nf, "r") as fh:
- for l in fh:
- if "bioconda::" in l:
- bioconda_packages = [b for b in l.split() if "bioconda::" in b]
+ with open(module.main_nf) as fh:
+ for line in fh:
+ if "bioconda::" in line:
+ bioconda_packages = [b for b in line.split() if "bioconda::" in b]
except FileNotFoundError:
log.error(
f"Neither `environment.yml` nor `main.nf` of {module.component_name} module could be read to get bioconada version of used tools."
@@ -208,7 +207,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool:
),
]
- with open(module.main_nf, "r") as fh:
+ with open(module.main_nf) as fh:
content = fh.read()
# Go over file content of main.nf and find replacements
@@ -241,7 +240,7 @@ def bump_module_version(self, module: NFCoreComponent) -> bool:
fh.write(content)
# change version in environment.yml
- with open(module.environment_yml, "r") as fh:
+ with open(module.environment_yml) as fh:
env_yml = yaml.safe_load(fh)
re.sub(bioconda_packages[0], f"'bioconda::{bioconda_tool_name}={last_ver}'", env_yml["dependencies"])
with open(module.environment_yml, "w") as fh:
@@ -266,7 +265,7 @@ def get_bioconda_version(self, module: NFCoreComponent) -> List[str]:
# Check whether file exists and load it
bioconda_packages = []
try:
- with open(module.environment_yml, "r") as fh:
+ with open(module.environment_yml) as fh:
env_yml = yaml.safe_load(fh)
bioconda_packages = env_yml.get("dependencies", [])
except FileNotFoundError:
@@ -289,7 +288,7 @@ def _print_results(self) -> None:
for m in [self.up_to_date, self.updated, self.failed]:
try:
max_mod_name_len = max(len(m[2]), max_mod_name_len)
- except:
+ except Exception:
pass
def format_result(module_updates: List[Tuple[str, str]], table: Table) -> Table:
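
The `except:` → `except Exception:` change above is flake8's E722: a bare `except` also catches `KeyboardInterrupt` and `SystemExit`, which derive from `BaseException` rather than `Exception`. A small sketch of the narrowed handler, using a hypothetical list of `(status, message, name)` tuples:

```python
def max_module_name_len(rows):
    """Longest module name in (status, message, name) tuples; skip malformed rows."""
    max_len = 0
    for row in rows:
        try:
            max_len = max(max_len, len(row[2]))
        except Exception:  # TypeError/IndexError etc., but Ctrl-C still propagates
            pass
    return max_len


print(max_module_name_len([("ok", "up to date", "fastqc"), None]))  # -> 6
```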
diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py
index 68a38cc0cd..866e6312aa 100644
--- a/nf_core/modules/lint/__init__.py
+++ b/nf_core/modules/lint/__init__.py
@@ -6,7 +6,6 @@
nf-core modules lint
"""
-from __future__ import print_function
import logging
import os
@@ -16,7 +15,7 @@
import nf_core.modules.modules_utils
import nf_core.utils
-from nf_core.components.lint import ComponentLint, LintException, LintResult
+from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult
from nf_core.lint_utils import console
log = logging.getLogger(__name__)
@@ -119,11 +118,11 @@ def lint(
# Only lint the given module
if module:
if all_modules:
- raise LintException("You cannot specify a tool and request all tools to be linted.")
+ raise LintExceptionError("You cannot specify a tool and request all tools to be linted.")
local_modules = []
remote_modules = [m for m in self.all_remote_components if m.component_name == module]
if len(remote_modules) == 0:
- raise LintException(f"Could not find the specified module: '{module}'")
+ raise LintExceptionError(f"Could not find the specified module: '{module}'")
else:
local_modules = self.all_local_components
remote_modules = self.all_remote_components
diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py
index a052425539..c2fc9384e5 100644
--- a/nf_core/modules/lint/environment_yml.py
+++ b/nf_core/modules/lint/environment_yml.py
@@ -23,14 +23,14 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
env_yml = None
# load the environment.yml file
try:
- with open(Path(module.component_dir, "environment.yml"), "r") as fh:
+ with open(Path(module.component_dir, "environment.yml")) as fh:
env_yml = yaml.safe_load(fh)
module.passed.append(("environment_yml_exists", "Module's `environment.yml` exists", module.environment_yml))
except FileNotFoundError:
# check if the module's main.nf requires a conda environment
- with open(Path(module.component_dir, "main.nf"), "r") as fh:
+ with open(Path(module.component_dir, "main.nf")) as fh:
main_nf = fh.read()
if 'conda "${moduleDir}/environment.yml"' in main_nf:
module.failed.append(
@@ -49,9 +49,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
if env_yml:
valid_env_yml = False
try:
- with open(
- Path(module_lint_object.modules_repo.local_repo_dir, "modules/environment-schema.json"), "r"
- ) as fh:
+ with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/environment-schema.json")) as fh:
schema = json.load(fh)
validators.validate(instance=env_yml, schema=schema)
module.passed.append(
@@ -92,7 +90,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
yaml.dump(env_yml, fh, Dumper=custom_yaml_dumper())
# Check that the name in the environment.yml file matches the name in the meta.yml file
- with open(Path(module.component_dir, "meta.yml"), "r") as fh:
+ with open(Path(module.component_dir, "meta.yml")) as fh:
meta_yml = yaml.safe_load(fh)
if env_yml["name"] == meta_yml["name"]:
diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py
index 56a9e99925..fd4d81f7f2 100644
--- a/nf_core/modules/lint/main_nf.py
+++ b/nf_core/modules/lint/main_nf.py
@@ -55,7 +55,7 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar):
if lines is None:
try:
# Check whether file exists and load it
- with open(module.main_nf, "r") as fh:
+ with open(module.main_nf) as fh:
lines = fh.readlines()
module.passed.append(("main_nf_exists", "Module file exists", module.main_nf))
except FileNotFoundError:
@@ -81,39 +81,39 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar):
script_lines = []
shell_lines = []
when_lines = []
- for l in lines:
- if re.search(r"^\s*process\s*\w*\s*{", l) and state == "module":
+ for line in lines:
+ if re.search(r"^\s*process\s*\w*\s*{", line) and state == "module":
state = "process"
- if re.search(r"input\s*:", l) and state in ["process"]:
+ if re.search(r"input\s*:", line) and state in ["process"]:
state = "input"
continue
- if re.search(r"output\s*:", l) and state in ["input", "process"]:
+ if re.search(r"output\s*:", line) and state in ["input", "process"]:
state = "output"
continue
- if re.search(r"when\s*:", l) and state in ["input", "output", "process"]:
+ if re.search(r"when\s*:", line) and state in ["input", "output", "process"]:
state = "when"
continue
- if re.search(r"script\s*:", l) and state in ["input", "output", "when", "process"]:
+ if re.search(r"script\s*:", line) and state in ["input", "output", "when", "process"]:
state = "script"
continue
- if re.search(r"shell\s*:", l) and state in ["input", "output", "when", "process"]:
+ if re.search(r"shell\s*:", line) and state in ["input", "output", "when", "process"]:
state = "shell"
continue
# Perform state-specific linting checks
- if state == "process" and not _is_empty(l):
- process_lines.append(l)
- if state == "input" and not _is_empty(l):
- inputs.extend(_parse_input(module, l))
- if state == "output" and not _is_empty(l):
- outputs += _parse_output(module, l)
+ if state == "process" and not _is_empty(line):
+ process_lines.append(line)
+ if state == "input" and not _is_empty(line):
+ inputs.extend(_parse_input(module, line))
+ if state == "output" and not _is_empty(line):
+ outputs += _parse_output(module, line)
outputs = list(set(outputs)) # remove duplicate 'meta's
- if state == "when" and not _is_empty(l):
- when_lines.append(l)
- if state == "script" and not _is_empty(l):
- script_lines.append(l)
- if state == "shell" and not _is_empty(l):
- shell_lines.append(l)
+ if state == "when" and not _is_empty(line):
+ when_lines.append(line)
+ if state == "script" and not _is_empty(line):
+ script_lines.append(line)
+ if state == "shell" and not _is_empty(line):
+ shell_lines.append(line)
# Check that we have required sections
if not len(outputs):
@@ -140,7 +140,7 @@ def main_nf(module_lint_object, module, fix_version, registry, progress_bar):
# Check that shell uses a template
if len(shell_lines):
- if any("template" in l for l in shell_lines):
+ if any("template" in line for line in shell_lines):
module.passed.append(("main_nf_shell_template", "`template` found in `shell` block", module.main_nf))
else:
module.failed.append(("main_nf_shell_template", "No `template` found in `shell` block", module.main_nf))
@@ -249,21 +249,21 @@ def check_process_section(self, lines, registry, fix_version, progress_bar):
# Deprecated enable_conda
for i, raw_line in enumerate(lines):
url = None
- l = raw_line.strip(" \n'\"}:")
+ line = raw_line.strip(" \n'\"}:")
        # Catch preceding "container "
- if l.startswith("container"):
- l = l.replace("container", "").strip(" \n'\"}:")
+ if line.startswith("container"):
+ line = line.replace("container", "").strip(" \n'\"}:")
- if _container_type(l) == "conda":
- if "bioconda::" in l:
- bioconda_packages = [b for b in l.split() if "bioconda::" in b]
- match = re.search(r"params\.enable_conda", l)
+ if _container_type(line) == "conda":
+ if "bioconda::" in line:
+ bioconda_packages = [b for b in line.split() if "bioconda::" in b]
+ match = re.search(r"params\.enable_conda", line)
if match is None:
self.passed.append(
(
"deprecated_enable_conda",
- f"Deprecated parameter 'params.enable_conda' correctly not found in the conda definition",
+ "Deprecated parameter 'params.enable_conda' correctly not found in the conda definition",
self.main_nf,
)
)
@@ -271,35 +271,35 @@ def check_process_section(self, lines, registry, fix_version, progress_bar):
self.failed.append(
(
"deprecated_enable_conda",
- f"Found deprecated parameter 'params.enable_conda' in the conda definition",
+ "Found deprecated parameter 'params.enable_conda' in the conda definition",
self.main_nf,
)
)
- if _container_type(l) == "singularity":
+ if _container_type(line) == "singularity":
# e.g. "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img -> v1.2.0_cv1
# e.g. "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0 -> 0.11.9--0
# Please god let's find a better way to do this than regex
- match = re.search(r"(?:[:.])?([A-Za-z\d\-_.]+?)(?:\.img)?(?:\.sif)?$", l)
+ match = re.search(r"(?:[:.])?([A-Za-z\d\-_.]+?)(?:\.img)?(?:\.sif)?$", line)
if match is not None:
singularity_tag = match.group(1)
self.passed.append(("singularity_tag", f"Found singularity tag: {singularity_tag}", self.main_nf))
else:
self.failed.append(("singularity_tag", "Unable to parse singularity tag", self.main_nf))
singularity_tag = None
- url = urlparse(l.split("'")[0])
+ url = urlparse(line.split("'")[0])
- if _container_type(l) == "docker":
+ if _container_type(line) == "docker":
# e.g. "quay.io/biocontainers/krona:2.7.1--pl526_5 -> 2.7.1--pl526_5
# e.g. "biocontainers/biocontainers:v1.2.0_cv1 -> v1.2.0_cv1
- match = re.search(r":([A-Za-z\d\-_.]+)$", l)
+ match = re.search(r":([A-Za-z\d\-_.]+)$", line)
if match is not None:
docker_tag = match.group(1)
self.passed.append(("docker_tag", f"Found docker tag: {docker_tag}", self.main_nf))
else:
self.failed.append(("docker_tag", "Unable to parse docker tag", self.main_nf))
docker_tag = None
- if l.startswith(registry):
- l_stripped = re.sub(r"\W+$", "", l)
+ if line.startswith(registry):
+ l_stripped = re.sub(r"\W+$", "", line)
self.failed.append(
(
"container_links",
@@ -308,15 +308,15 @@ def check_process_section(self, lines, registry, fix_version, progress_bar):
)
)
else:
- self.passed.append(("container_links", f"Container prefix is correct", self.main_nf))
+ self.passed.append(("container_links", "Container prefix is correct", self.main_nf))
        # Guess if container name is a simple one (e.g. nfcore/ubuntu:20.04)
# If so, add quay.io as default container prefix
- if l.count("/") == 1 and l.count(":") == 1:
- l = "/".join([registry, l]).replace("//", "/")
- url = urlparse(l.split("'")[0])
+ if line.count("/") == 1 and line.count(":") == 1:
+ line = "/".join([registry, line]).replace("//", "/")
+ url = urlparse(line.split("'")[0])
- if l.startswith("container") or _container_type(l) == "docker" or _container_type(l) == "singularity":
+ if line.startswith("container") or _container_type(line) == "docker" or _container_type(line) == "singularity":
check_container_link_line(self, raw_line, registry)
# Try to connect to container URLs
@@ -348,7 +348,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar):
# Get bioconda packages from environment.yml
try:
- with open(Path(self.component_dir, "environment.yml"), "r") as fh:
+ with open(Path(self.component_dir, "environment.yml")) as fh:
env_yml = yaml.safe_load(fh)
if "dependencies" in env_yml:
bioconda_packages = [x for x in env_yml["dependencies"] if isinstance(x, str) and "bioconda::" in x]
@@ -424,7 +424,7 @@ def check_process_section(self, lines, registry, fix_version, progress_bar):
def check_process_labels(self, lines):
correct_process_labels = ["process_single", "process_low", "process_medium", "process_high", "process_long"]
- all_labels = [l.strip() for l in lines if l.lstrip().startswith("label ")]
+ all_labels = [line.strip() for line in lines if line.lstrip().startswith("label ")]
bad_labels = []
good_labels = []
if len(all_labels) > 0:
@@ -475,14 +475,14 @@ def check_process_labels(self, lines):
def check_container_link_line(self, raw_line, registry):
"""Look for common problems in the container name / URL, for docker and singularity."""
- l = raw_line.strip(" \n'\"}:")
+ line = raw_line.strip(" \n'\"}:")
# lint double quotes
- if l.count('"') > 2:
+ if line.count('"') > 2:
self.failed.append(
(
"container_links",
- f"Too many double quotes found when specifying container: {l.lstrip('container ')}",
+ f"Too many double quotes found when specifying container: {line.lstrip('container ')}",
self.main_nf,
)
)
@@ -490,7 +490,7 @@ def check_container_link_line(self, raw_line, registry):
self.passed.append(
(
"container_links",
- f"Correct number of double quotes found when specifying container: {l.lstrip('container ')}",
+ f"Correct number of double quotes found when specifying container: {line.lstrip('container ')}",
self.main_nf,
)
)
@@ -524,7 +524,9 @@ def check_container_link_line(self, raw_line, registry):
)
# lint more than one container in the same line
- if ("https://containers" in l or "https://depot" in l) and ("biocontainers/" in l or l.startswith(registry)):
+ if ("https://containers" in line or "https://depot" in line) and (
+ "biocontainers/" in line or line.startswith(registry)
+ ):
self.warned.append(
(
"container_links",
@@ -576,7 +578,7 @@ def _parse_output(self, line):
output = []
if "meta" in line:
output.append("meta")
- if not "emit:" in line:
+ if "emit:" not in line:
self.failed.append(("missing_emit", f"Missing emit statement: {line.strip()}", self.main_nf))
else:
output.append(line.split("emit:")[1].strip())
@@ -605,14 +607,14 @@ def _fix_module_version(self, current_version, latest_version, singularity_tag,
# Get latest build
build = _get_build(response)
- with open(self.main_nf, "r") as source:
+ with open(self.main_nf) as source:
lines = source.readlines()
# Check if the new version + build exist and replace
new_lines = []
for line in lines:
- l = line.strip(" '\"")
- build_type = _container_type(l)
+ line_stripped = line.strip(" '\"")
+ build_type = _container_type(line_stripped)
if build_type == "conda":
new_lines.append(re.sub(rf"{current_version}", f"{latest_version}", line))
elif build_type in ("singularity", "docker"):
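
The wholesale `l` → `line` renames in this file come from flake8's E741 (ambiguous variable name: `l`, `O` and `I` are easily misread as `1` and `0`). A tiny illustration:

```python
lines = ["process FASTQC {", "    label 'process_single'", "}"]

# for l in lines:  ->  E741: ambiguous variable name 'l'
for line in lines:
    if line.lstrip().startswith("label "):
        print(line.strip())  # -> label 'process_single'
```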
diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py
index 7552c1ceae..551a978f4d 100644
--- a/nf_core/modules/lint/meta_yml.py
+++ b/nf_core/modules/lint/meta_yml.py
@@ -54,7 +54,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None
meta_yaml = yaml.safe_load("".join(lines))
if meta_yaml is None:
try:
- with open(module.meta_yml, "r") as fh:
+ with open(module.meta_yml) as fh:
meta_yaml = yaml.safe_load(fh)
module.passed.append(("meta_yml_exists", "Module `meta.yml` exists", module.meta_yml))
except FileNotFoundError:
@@ -64,7 +64,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None
# Confirm that the meta.yml file is valid according to the JSON schema
valid_meta_yml = False
try:
- with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/meta-schema.json"), "r") as fh:
+ with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/meta-schema.json")) as fh:
schema = json.load(fh)
validators.validate(instance=meta_yaml, schema=schema)
module.passed.append(("meta_yml_valid", "Module `meta.yml` is valid", module.meta_yml))
diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py
index d52962eabb..29bf78a66b 100644
--- a/nf_core/modules/lint/module_patch.py
+++ b/nf_core/modules/lint/module_patch.py
@@ -40,7 +40,7 @@ def check_patch_valid(module, patch_path):
Returns:
(bool): False if any test failed, True otherwise
"""
- with open(patch_path, "r") as fh:
+ with open(patch_path) as fh:
patch_lines = fh.readlines()
# Check that the file contains a patch for at least one file
@@ -170,8 +170,8 @@ def patch_reversible(module_lint_object, module, patch_path):
)
except LookupError:
# Patch failed. Save the patch file by moving to the install dir
- module.failed.append((("patch_reversible", "Patch file is outdated or edited", patch_path)))
+ module.failed.append(("patch_reversible", "Patch file is outdated or edited", patch_path))
return False
- module.passed.append((("patch_reversible", "Patch agrees with module files", patch_path)))
+ module.passed.append(("patch_reversible", "Patch agrees with module files", patch_path))
return True
diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py
index 87033e3f49..520f8cf0a2 100644
--- a/nf_core/modules/lint/module_tests.py
+++ b/nf_core/modules/lint/module_tests.py
@@ -44,7 +44,7 @@ def module_tests(_, module: NFCoreComponent):
if module.nftest_main_nf.is_file():
        # Check if main.nf.test.snap file exists, if 'snapshot(' is inside main.nf.test
- with open(module.nftest_main_nf, "r") as fh:
+ with open(module.nftest_main_nf) as fh:
if "snapshot(" in fh.read():
snap_file = module.nftest_testdir / "main.nf.test.snap"
if snap_file.is_file():
@@ -52,7 +52,7 @@ def module_tests(_, module: NFCoreComponent):
("test_snapshot_exists", "snapshot file `main.nf.test.snap` exists", snap_file)
)
# Validate no empty files
- with open(snap_file, "r") as snap_fh:
+ with open(snap_file) as snap_fh:
try:
snap_content = json.load(snap_fh)
for test_name in snap_content.keys():
@@ -145,7 +145,7 @@ def module_tests(_, module: NFCoreComponent):
pytest_yml_path = module.base_dir / "tests" / "config" / "pytest_modules.yml"
if pytest_yml_path.is_file() and not is_pytest:
try:
- with open(pytest_yml_path, "r") as fh:
+ with open(pytest_yml_path) as fh:
pytest_yml = yaml.safe_load(fh)
if module.component_name in pytest_yml.keys():
module.failed.append(
@@ -165,7 +165,7 @@ def module_tests(_, module: NFCoreComponent):
if module.tags_yml.is_file():
# Check that tags.yml exists and it has the correct entry
module.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", module.tags_yml))
- with open(module.tags_yml, "r") as fh:
+ with open(module.tags_yml) as fh:
tags_yml = yaml.safe_load(fh)
if module.component_name in tags_yml.keys():
module.passed.append(("test_tags_yml", "correct entry in tags.yml", module.tags_yml))
diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py
index efce3868e5..a97229ff62 100644
--- a/nf_core/modules/modules_differ.py
+++ b/nf_core/modules/modules_differ.py
@@ -74,9 +74,9 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d
temp_path = Path(to_dir, file)
curr_path = Path(from_dir, file)
if temp_path.exists() and curr_path.exists() and temp_path.is_file():
- with open(temp_path, "r") as fh:
+ with open(temp_path) as fh:
new_lines = fh.readlines()
- with open(curr_path, "r") as fh:
+ with open(curr_path) as fh:
old_lines = fh.readlines()
if new_lines == old_lines:
@@ -93,7 +93,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d
diffs[file] = (ModulesDiffer.DiffEnum.CHANGED, diff)
elif temp_path.exists():
- with open(temp_path, "r") as fh:
+ with open(temp_path) as fh:
new_lines = fh.readlines()
# The file was created
# Show file against /dev/null
@@ -108,7 +108,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d
elif curr_path.exists():
# The file was removed
# Show file against /dev/null
- with open(curr_path, "r") as fh:
+ with open(curr_path) as fh:
old_lines = fh.readlines()
diff = difflib.unified_diff(
old_lines,
@@ -279,7 +279,7 @@ def per_file_patch(patch_fn):
dict[str, str]: A dictionary indexed by the filenames with the
file patches as values
"""
- with open(patch_fn, "r") as fh:
+ with open(patch_fn) as fh:
lines = fh.readlines()
patches = {}
@@ -447,7 +447,7 @@ def try_apply_patch(module, repo_path, patch_path, module_dir, reverse=False):
log.debug(f"Applying patch to {file}")
fn = Path(file).relative_to(module_relpath)
file_path = module_dir / fn
- with open(file_path, "r") as fh:
+ with open(file_path) as fh:
file_lines = fh.readlines()
patched_new_lines = ModulesDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse)
new_files[str(fn)] = patched_new_lines
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py
index 32eb8736d6..f68c27b2d8 100644
--- a/nf_core/modules/modules_json.py
+++ b/nf_core/modules/modules_json.py
@@ -637,7 +637,7 @@ def load(self):
UserWarning: If the modules.json file is not found
"""
try:
- with open(self.modules_json_path, "r") as fh:
+ with open(self.modules_json_path) as fh:
try:
self.modules_json = json.load(fh)
except json.JSONDecodeError as e:
diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py
index 152ed7b0c0..204c20fd71 100644
--- a/nf_core/modules/modules_repo.py
+++ b/nf_core/modules/modules_repo.py
@@ -1,8 +1,6 @@
-import filecmp
import logging
import os
import shutil
-from pathlib import Path
import git
import rich
diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py
index 3ae01e9eef..ca8993483b 100644
--- a/nf_core/modules/modules_utils.py
+++ b/nf_core/modules/modules_utils.py
@@ -9,7 +9,7 @@
log = logging.getLogger(__name__)
-class ModuleException(Exception):
+class ModuleExceptionError(Exception):
"""Exception raised when there was an error with module commands"""
pass
@@ -69,12 +69,12 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis
if os.path.exists(nfcore_modules_dir):
for m in sorted([m for m in os.listdir(nfcore_modules_dir) if not m == "lib"]):
if not os.path.isdir(os.path.join(nfcore_modules_dir, m)):
- raise ModuleException(
+ raise ModuleExceptionError(
f"File found in '{nfcore_modules_dir}': '{m}'! This directory should only contain module directories."
)
m_content = os.listdir(os.path.join(nfcore_modules_dir, m))
# Not a module, but contains sub-modules
- if not "main.nf" in m_content:
+ if "main.nf" not in m_content:
for tool in m_content:
nfcore_modules_names.append(os.path.join(m, tool))
else:
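
`if not "main.nf" in m_content` → `if "main.nf" not in m_content` is flake8's E713: both parse to the same test, but the explicit membership operator reads more naturally. A quick check:

```python
m_content = ["fastqc", "meta.yml"]

# both forms evaluate identically; only the second passes the linter
assert (not "main.nf" in m_content) == ("main.nf" not in m_content)

if "main.nf" not in m_content:
    print("not a module itself; contains sub-modules")
```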
diff --git a/nf_core/params_file.py b/nf_core/params_file.py
index 5c50c53fb9..51986821b5 100644
--- a/nf_core/params_file.py
+++ b/nf_core/params_file.py
@@ -1,6 +1,5 @@
""" Create a YAML parameter file """
-from __future__ import print_function
import json
import logging
@@ -9,8 +8,6 @@
from typing import Literal, Optional
import questionary
-import rich
-import rich.columns
import nf_core.list
import nf_core.utils
@@ -196,7 +193,7 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal
description = properties.get("description", "")
self.schema_obj.get_schema_defaults()
default = properties.get("default")
- typ = properties.get("type")
+ type = properties.get("type")
required = name in required_properties
out += _print_wrapped(name, "-", mode="both")
@@ -204,8 +201,11 @@ def format_param(self, name, properties, required_properties=(), show_hidden=Fal
if description:
out += _print_wrapped(description + "\n", mode="none", indent=4)
- if typ:
- out += _print_wrapped(f"Type: {typ}", mode="none", indent=4)
+ if type:
+ out += _print_wrapped(f"Type: {type}", mode="none", indent=4)
+
+ if required:
+ out += _print_wrapped("Required", mode="none", indent=4)
out += _print_wrapped("\n", mode="end")
out += f"# {name} = {json.dumps(default)}\n"
diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig
index b6b3190776..9b990088ab 100644
--- a/nf_core/pipeline-template/.editorconfig
+++ b/nf_core/pipeline-template/.editorconfig
@@ -22,3 +22,11 @@ indent_size = unset
[/assets/email*]
indent_size = unset
+
+# ignore Readme
+[README.md]
+indent_style = unset
+
+# ignore python
+[*.{py}]
+indent_style = unset
diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
index 2f83a0962c..4c9fd69fcc 100644
--- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
+++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml
@@ -31,7 +31,7 @@ jobs:
}
profiles: test_full
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: Tower debug log file
path: |
diff --git a/nf_core/pipeline-template/.github/workflows/awstest.yml b/nf_core/pipeline-template/.github/workflows/awstest.yml
index 9a0bf4afbc..25726aa1c9 100644
--- a/nf_core/pipeline-template/.github/workflows/awstest.yml
+++ b/nf_core/pipeline-template/.github/workflows/awstest.yml
@@ -25,7 +25,7 @@ jobs:
}
profiles: test
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: Tower debug log file
path: |
diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml
index 8edfa540c9..057016e4be 100644
--- a/nf_core/pipeline-template/.github/workflows/branch.yml
+++ b/nf_core/pipeline-template/.github/workflows/branch.yml
@@ -19,7 +19,7 @@ jobs:
# NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets
- name: Post PR comment
if: failure()
- uses: mshick/add-pr-comment@v1
+ uses: mshick/add-pr-comment@v2
with:
message: |
## This PR is against the `master` branch :x:
diff --git a/nf_core/pipeline-template/.github/workflows/clean-up.yml b/nf_core/pipeline-template/.github/workflows/clean-up.yml
index 427aad5087..8feb3fb017 100644
--- a/nf_core/pipeline-template/.github/workflows/clean-up.yml
+++ b/nf_core/pipeline-template/.github/workflows/clean-up.yml
@@ -10,7 +10,7 @@ jobs:
issues: write
pull-requests: write
steps:
- - uses: actions/stale@v7
+ - uses: actions/stale@v9
with:
stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days."
stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful."
diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml
new file mode 100644
index 0000000000..d5d0307262
--- /dev/null
+++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml
@@ -0,0 +1,68 @@
+name: Test successful pipeline download with 'nf-core download'
+
+# Run the workflow when:
+# - dispatched manually
+#  - a PR is opened or reopened against the master branch
+#  - the head branch of the pull request is updated, i.e. when last-minute fixes for a release are pushed to dev
+
+on:
+ workflow_dispatch:
+ pull_request:
+ types:
+ - opened
+ branches:
+ - master
+ pull_request_target:
+ branches:
+ - master
+
+env:
+ NXF_ANSI_LOG: false
+
+jobs:
+ download:
+ runs-on: ["self-hosted"]
+ steps:
+ - name: Install Nextflow
+ uses: nf-core/setup-nextflow@v1
+
+ - uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ architecture: "x64"
+ - uses: eWaterCycle/setup-singularity@v7
+ with:
+ singularity-version: 3.8.3
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install git+https://github.com/nf-core/tools.git@dev
+
+      - name: Set the repository name and current branch as environment variables
+ run: |
+ echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV}
+ echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV}
+ echo "REPO_BRANCH=${GITHUB_REF#refs/heads/}" >> ${GITHUB_ENV}
+
+ - name: Download the pipeline
+ env:
+ NXF_SINGULARITY_CACHEDIR: ./
+ run: |
+ nf-core download ${{ env.REPO_LOWERCASE }} \
+ --revision ${{ env.REPO_BRANCH }} \
+ --outdir ./${{ env.REPOTITLE_LOWERCASE }} \
+ --compress "none" \
+ --container-system 'singularity' \
+ --container-library "quay.io" -l "docker.io" -l "ghcr.io" \
+ --container-cache-utilisation 'amend' \
+ --download-configuration
+
+ - name: Inspect download
+ run: tree ./${{ env.REPOTITLE_LOWERCASE }}
+
+ - name: Run the downloaded pipeline
+ env:
+ NXF_SINGULARITY_CACHEDIR: ./
+ NXF_SINGULARITY_HOME_MOUNT: true
+ run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
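
The final step runs the pipeline from the directory that `nf-core download` creates for the revision; the `sed 's/\W/_/g'` call maps every non-word character in the branch name to an underscore so the path matches. A sketch of the same mapping in Python, with an illustrative branch name:

```python
import re


def revision_dirname(branch: str) -> str:
    """Replace every non-word character with an underscore (like sed 's/\\W/_/g')."""
    return re.sub(r"\W", "_", branch)


print(revision_dirname("TEMPLATE-2.12"))  # -> TEMPLATE_2_12
```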
diff --git a/nf_core/pipeline-template/.github/workflows/fix-linting.yml b/nf_core/pipeline-template/.github/workflows/fix-linting.yml
index 31e8cd2b36..d9986bd30f 100644
--- a/nf_core/pipeline-template/.github/workflows/fix-linting.yml
+++ b/nf_core/pipeline-template/.github/workflows/fix-linting.yml
@@ -24,32 +24,25 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}
- - uses: actions/setup-node@v4
-
- - name: Install Prettier
- run: npm install -g prettier @prettier/plugin-php
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: 3.11
+ cache: "pip"
- # Check that we actually need to fix something
- - name: Run 'prettier --check'
- id: prettier_status
- run: |
- if prettier --check ${GITHUB_WORKSPACE}; then
- echo "result=pass" >> $GITHUB_OUTPUT
- else
- echo "result=fail" >> $GITHUB_OUTPUT
- fi
+ - name: Install pre-commit
+ run: pip install pre-commit
- - name: Run 'prettier --write'
- if: steps.prettier_status.outputs.result == 'fail'
- run: prettier --write ${GITHUB_WORKSPACE}
+ - name: Run pre-commit
+ run: pre-commit run --all-files || echo "status=fail" >> $GITHUB_ENV
- name: Commit & push changes
- if: steps.prettier_status.outputs.result == 'fail'
+ if: env.status == 'fail'
run: |
git config user.email "core@nf-co.re"
git config user.name "nf-core-bot"
git config push.default upstream
git add .
git status
- git commit -m "[automated] Fix linting with Prettier"
+ git commit -m "[automated] Fix linting with pre-commit"
git push {%- endraw %}
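
The rewritten job hinges on pre-commit's exit status: `pre-commit run` returns non-zero when a hook fails or rewrites a file, which the `|| echo "status=fail" >> $GITHUB_ENV` line records without failing the step, so the commit step can still run. A local sketch of the same check, assuming pre-commit is installed:

```python
import subprocess

result = subprocess.run(["pre-commit", "run", "--all-files"])
if result.returncode != 0:
    print("Hooks failed or changed files; stage the changes and commit again.")
```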
diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml
index 94aa5278be..a267f1ec14 100644
--- a/nf_core/pipeline-template/.github/workflows/linting.yml
+++ b/nf_core/pipeline-template/.github/workflows/linting.yml
@@ -11,61 +11,22 @@ on:
types: [published]
jobs:
- EditorConfig:
+ pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- - uses: actions/setup-node@v4
-
- - name: Install editorconfig-checker
- run: npm install -g editorconfig-checker
-
- - name: Run ECLint check
- run: editorconfig-checker -exclude README.md $(find .* -type f | grep -v '.git\|.py\|.md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile')
-
- Prettier:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - uses: actions/setup-node@v4
-
- - name: Install Prettier
- run: npm install -g prettier
-
- - name: Run Prettier --check
- run: prettier --check ${GITHUB_WORKSPACE}
-
- PythonBlack:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - name: Check code lints with Black
- uses: psf/black@stable
-
- # If the above check failed, post a comment on the PR explaining the failure
- - name: Post PR comment
- if: failure()
- uses: mshick/add-pr-comment@v1
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
with:
- message: |
- ## Python linting (`black`) is failing
-
- To keep the code consistent with lots of contributors, we run automated code consistency checks.
- To fix this CI test, please run:
-
- * Install [`black`](https://black.readthedocs.io/en/stable/): `pip install black`
- * Fix formatting errors in your pipeline: `black .`
-
- Once you push these changes the test should pass, and you can hide this comment :+1:
+ python-version: 3.11
+ cache: "pip"
- We highly recommend setting up Black in your code editor so that this formatting is done automatically on save. Ask about it on Slack for help!
+ - name: Install pre-commit
+ run: pip install pre-commit
- Thanks again for your contribution!
- repo-token: ${{ secrets.GITHUB_TOKEN }}
- allow-repeats: false
+ - name: Run pre-commit
+ run: pre-commit run --all-files
nf-core:
runs-on: ubuntu-latest
@@ -76,7 +37,7 @@ jobs:
- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
- - uses: actions/setup-python@v4
+ - uses: actions/setup-python@v5
with:
python-version: "3.11"
architecture: "x64"
@@ -99,7 +60,7 @@ jobs:
- name: Upload linting log file artifact
if: ${{ always() }}
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: linting-logs
path: |
diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml
index 09f8c423e5..e5528b29cf 100644
--- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml
+++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download lint results
- uses: dawidd6/action-download-artifact@v2
+ uses: dawidd6/action-download-artifact@v3
with:
workflow: linting.yml
workflow_conclusion: completed
diff --git a/nf_core/pipeline-template/.github/workflows/release-announcements.yml b/nf_core/pipeline-template/.github/workflows/release-announcements.yml
index ad497db4e1..1dd48b123f 100644
--- a/nf_core/pipeline-template/.github/workflows/release-announcements.yml
+++ b/nf_core/pipeline-template/.github/workflows/release-announcements.yml
@@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/setup-python@v4
+ - uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install dependencies
@@ -56,7 +56,7 @@ jobs:
bsky-post:
runs-on: ubuntu-latest
steps:
- - uses: zentered/bluesky-post-action@v0.0.2
+ - uses: zentered/bluesky-post-action@v0.1.0
with:
post: |
Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml
index acf7269536..363d5b1d42 100644
--- a/nf_core/pipeline-template/.gitpod.yml
+++ b/nf_core/pipeline-template/.gitpod.yml
@@ -7,6 +7,7 @@ tasks:
- name: unset JAVA_TOOL_OPTIONS
command: |
unset JAVA_TOOL_OPTIONS
+
vscode:
extensions: # based on nf-core.nf-core-extensionpack
- codezombiech.gitignore # Language support for .gitignore files
diff --git a/nf_core/pipeline-template/.pre-commit-config.yaml b/nf_core/pipeline-template/.pre-commit-config.yaml
index 0c31cdb99f..984321ff26 100644
--- a/nf_core/pipeline-template/.pre-commit-config.yaml
+++ b/nf_core/pipeline-template/.pre-commit-config.yaml
@@ -1,5 +1,10 @@
repos:
- repo: https://github.com/pre-commit/mirrors-prettier
- rev: "v2.7.1"
+ rev: "v3.1.1"
hooks:
- id: prettier
+ - repo: https://github.com/editorconfig-checker/editorconfig-checker.python
+ rev: "2.7.3"
+ hooks:
+ - id: editorconfig-checker
+ alias: ec
diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md
index c874090b51..5160ccb04d 100644
--- a/nf_core/pipeline-template/README.md
+++ b/nf_core/pipeline-template/README.md
@@ -1,7 +1,11 @@
{% if branded -%}
-# ![{{ name }}](docs/images/nf-core-{{ short_name }}_logo_light.png#gh-light-mode-only) ![{{ name }}](docs/images/nf-core-{{ short_name }}_logo_dark.png#gh-dark-mode-only)
-
+<h1>
+  <picture>
+    <source media="(prefers-color-scheme: dark)" srcset="docs/images/nf-core-{{ short_name }}_logo_dark.png">
+    <img alt="{{ name }}" src="docs/images/nf-core-{{ short_name }}_logo_light.png">
+  </picture>
+</h1>
{% endif -%}
{% if github_badges -%}
[](https://github.com/{{ name }}/actions?query=workflow%3A%22nf-core+CI%22)
diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config
index dec8051a17..b0d053c262 100644
--- a/nf_core/pipeline-template/nextflow.config
+++ b/nf_core/pipeline-template/nextflow.config
@@ -100,6 +100,7 @@ profiles {
podman.enabled = false
shifter.enabled = false
charliecloud.enabled = false
+ channels = ['conda-forge', 'bioconda', 'defaults']
apptainer.enabled = false
}
mamba {
diff --git a/nf_core/pipeline-template/pyproject.toml b/nf_core/pipeline-template/pyproject.toml
index 0d62beb6f9..984c091091 100644
--- a/nf_core/pipeline-template/pyproject.toml
+++ b/nf_core/pipeline-template/pyproject.toml
@@ -1,10 +1,13 @@
-# Config file for Python. Mostly used to configure linting of bin/check_samplesheet.py with Black.
+# Config file for Python. Mostly used to configure linting of bin/check_samplesheet.py with Ruff.
# Should be kept the same as nf-core/tools to avoid fighting with template synchronisation.
-[tool.black]
+[tool.ruff]
line-length = 120
-target_version = ["py37", "py38", "py39", "py310"]
+target-version = "py38"
+select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"]
+cache-dir = "~/.cache/ruff"
-[tool.isort]
-profile = "black"
-known_first_party = ["nf_core"]
-multi_line_output = 3
+[tool.ruff.isort]
+known-first-party = ["nf_core"]
+
+[tool.ruff.per-file-ignores]
+"__init__.py" = ["E402", "F401"]
diff --git a/nf_core/refgenie.py b/nf_core/refgenie.py
index 6f09b75532..de9201bcd6 100644
--- a/nf_core/refgenie.py
+++ b/nf_core/refgenie.py
@@ -2,7 +2,6 @@
Update a nextflow.config file with refgenie genomes
"""
-import json
import logging
import os
import re
@@ -86,7 +85,7 @@ def _update_nextflow_home_config(refgenie_genomes_config_file, nxf_home):
if os.path.exists(nxf_home_config):
# look for include statement in config
has_include_statement = False
- with open(nxf_home_config, "r") as fh:
+ with open(nxf_home_config) as fh:
lines = fh.readlines()
for line in lines:
if re.match(rf"\s*includeConfig\s*'{os.path.abspath(refgenie_genomes_config_file)}'", line):
diff --git a/nf_core/ro_crate.py b/nf_core/ro_crate.py
new file mode 100644
index 0000000000..fe8bc6a998
--- /dev/null
+++ b/nf_core/ro_crate.py
@@ -0,0 +1,197 @@
+#!/usr/bin/env python
+""" Code to deal with pipeline RO (Research Object) Crates """
+
+
+import logging
+import tempfile
+from pathlib import Path
+from typing import Union
+
+import requests
+import rocrate.model.entity
+import rocrate.rocrate
+from rocrate.model.person import Person
+
+from nf_core.utils import Pipeline
+
+log = logging.getLogger(__name__)
+
+
+class RoCrate:
+ """Class to generate an RO Crate for a pipeline"""
+
+ def __init__(self, pipeline_dir: Path, version=""):
+ from nf_core.utils import is_pipeline_directory
+
+ is_pipeline_directory(pipeline_dir)
+ self.pipeline_dir = pipeline_dir
+ self.version = version
+ self.crate: rocrate.rocrate.ROCrate
+ self.pipeline_obj = Pipeline(str(self.pipeline_dir))
+ self.pipeline_obj._load()
+
+ def create_ro_crate(self, outdir: Path, metadata_fn="", zip_fn=""):
+ """Create an RO Crate for the pipeline"""
+
+ # Set input paths
+ self.get_crate_paths(outdir)
+
+ self.make_workflow_ro_crate(self.pipeline_dir)
+
+ # Save just the JSON metadata file
+ if metadata_fn is not None:
+ log.info(f"Saving metadata file '{metadata_fn}'")
+ # Save the crate to a temporary directory
+ tmpdir = Path(tempfile.mkdtemp(), "wf")
+ self.crate.write(tmpdir)
+ # Now save just the JSON file
+ crate_json_fn = Path(tmpdir, "ro-crate-metadata.json")
+ crate_json_fn.rename(metadata_fn)
+
+ # Save the whole crate zip file
+ if zip_fn is not None:
+ log.info(f"Saving zip file '{zip_fn}'")
+ self.crate.write_zip(zip_fn)
+
+ def make_workflow_ro_crate(self, path: Path):
+ import nf_core.utils
+
+ if self.pipeline_obj is None:
+ raise ValueError("Pipeline object not loaded")
+
+ # Create the RO Crate
+ self.crate = rocrate.rocrate.ROCrate()
+
+ # Conform to RO-Crate 1.1 and workflowhub-ro-crate
+
+ # Set language type
+ programming_language = rocrate.model.entity.Entity(
+ self.crate,
+ "#nextflow",
+ properties={
+ "@type": ["ComputerLanguage", "SoftwareApplication"],
+ "name": "Nextflow",
+ "url": "https://www.nextflow.io/",
+ "identifier": "https://www.nextflow.io/",
+ "version": self.pipeline_obj.nf_config.get("manifest.nextflowVersion", ""),
+ },
+ )
+ self.crate.add(programming_language)
+ self.crate.update_jsonld(
+ {
+ "@id": "ro-crate-metadata.json",
+ "conformsTo": [
+ {"@id": "https://w3id.org/ro/crate/1.1"},
+ {"@id": "https://w3id.org/workflowhub/workflow-ro-crate/1.0"},
+ ],
+ }
+ )
+
+ # Set main entity file
+ wf_file = self.crate.add_jsonld(
+ {
+ "@id": "main.nf",
+ "@type": ["File", "SoftwareSourceCode", "ComputationalWorkflow"],
+ },
+ )
+ self.crate.mainEntity = wf_file
+ # self.crate.update_jsonld({"@id": "main.nf", "@type": ["File", "SoftwareSourceCode", "ComputationalWorkflow"]})
+
+ self.add_authors(wf_file)
+ wf_file.append_to("programmingLanguage", programming_language)
+
+ # add readme as description
+ readme = Path(self.pipeline_dir, "README.md")
+ self.crate.description = readme.read_text()
+
+ self.crate.license = "MIT"
+
+ # add doi as identifier
+ # self.crate.identifier = self.pipeline_obj.get("manifest", {}).get("doi", "")
+ self.crate.name = f'Research Object Crate for {self.pipeline_obj.nf_config.get("manifest.name")}'
+
+ if "dev" in self.pipeline_obj.nf_config.get("manifest.version", ""):
+ self.crate.CreativeWorkStatus = "InProgress"
+ else:
+ self.crate.CreativeWorkStatus = "Stable"
+
+ # Add all other files
+ wf_filenames = nf_core.utils.get_wf_files(self.pipeline_dir)
+ log.debug(f"Adding {len(wf_filenames)} workflow files")
+ for fn in wf_filenames:
+ # check if it wasn't already added
+ if fn == "main.nf":
+ continue
+ # add nextflow language to .nf and .config files
+ if fn.endswith(".nf") or fn.endswith(".config"):
+ log.debug(f"Adding workflow file: {fn}")
+ self.crate.add_file(fn, properties={"programmingLanguage": {"@id": "#nextflow"}})
+ if fn.endswith(".png"):
+ log.debug(f"Adding workflow file: {fn}")
+ self.crate.add_file(fn, properties={"@type": ["File", "ImageObject"]})
+ if "metro_map" in fn:
+ log.info(f"Setting main entity image to: {fn}")
+ wf_file.append_to("image", {"@id": fn})
+ if fn.endswith(".md"):
+ log.debug(f"Adding workflow file: {fn}")
+ self.crate.add_file(fn, properties={"encodingFormat": "text/markdown"})
+ else:
+ log.debug(f"Adding workflow file: {fn}")
+ self.crate.add_file(fn)
+
+ # Add keywords from github topics
+
+ def add_authors(self, wf_file):
+ """
+ Add workflow authors to the crate
+ NB: We don't have much metadata here - scope to improve in the future
+ """
+ # add author entity to crate
+
+ try:
+ authors = self.pipeline_obj.nf_config["manifest.author"].split(",")
+ except KeyError:
+ log.error("No author field found in manifest of nextflow.config")
+ return
+ for author in authors:
+ log.debug(f"Adding author: {author}")
+ orcid = get_orcid(author)
+            author_entity = self.crate.add(Person(self.crate, orcid, properties={"name": author}))
+            wf_file.append_to("author", author_entity)
+
+ def get_crate_paths(self, path):
+ """Given a pipeline name, directory, or path, set wf_crate_filename"""
+
+ path = Path(path)
+
+ if path.is_dir():
+ self.pipeline_dir = path
+ # wf_crate_filename = path / "ro-crate-metadata.json"
+ elif path.is_file():
+ self.pipeline_dir = path.parent
+ # wf_crate_filename = path
+
+ # Check that the schema file exists
+ if self.pipeline_dir is None:
+ raise OSError(f"Could not find pipeline '{path}'")
+
+
+def get_orcid(name: str) -> Union[str, None]:
+ base_url = "https://pub.orcid.org/v3.0/search/"
+ headers = {
+ "Accept": "application/json",
+ }
+ params = {"q": f'family-name:"{name.split()[-1]}" AND given-names:"{name.split()[0]}"'}
+ response = requests.get(base_url, params=params, headers=headers)
+
+ if response.status_code == 200:
+ json_response = response.json()
+ if json_response.get("num-found") == 1:
+ orcid_uri = json_response.get("result")[0].get("orcid-identifier", {}).get("uri")
+ log.info(f"Using found ORCID for {name}. Please double-check: {orcid_uri}")
+ return orcid_uri
+ else:
+ log.debug(f"No exact ORCID found for {name}. See {response.url}")
+ return None
+    else:
+        log.debug(f"API request unsuccessful. Status code: {response.status_code}")
+        return None
diff --git a/nf_core/schema.py b/nf_core/schema.py
index 7e4726f189..5b68556fb1 100644
--- a/nf_core/schema.py
+++ b/nf_core/schema.py
@@ -1,6 +1,5 @@
""" Code to deal with pipeline JSON Schema """
-from __future__ import print_function
import copy
import json
@@ -43,7 +42,7 @@ def __init__(self):
self.schema_from_scratch = False
self.no_prompts = False
self.web_only = False
- self.web_schema_build_url = "https://nf-co.re/pipeline_schema_builder"
+ self.web_schema_build_url = "https://oldsite.nf-co.re/pipeline_schema_builder"
self.web_schema_build_web_url = None
self.web_schema_build_api_url = None
@@ -107,7 +106,7 @@ def load_lint_schema(self):
def load_schema(self):
"""Load a pipeline schema from a file"""
- with open(self.schema_filename, "r") as fh:
+ with open(self.schema_filename) as fh:
self.schema = json.load(fh)
self.schema_defaults = {}
self.schema_params = {}
@@ -189,7 +188,7 @@ def load_input_params(self, params_path):
"""
# First, try to load as JSON
try:
- with open(params_path, "r") as fh:
+ with open(params_path) as fh:
try:
params = json.load(fh)
except json.JSONDecodeError as e:
@@ -200,7 +199,7 @@ def load_input_params(self, params_path):
log.debug(f"Could not load input params as JSON: {json_e}")
# This failed, try to load as YAML
try:
- with open(params_path, "r") as fh:
+ with open(params_path) as fh:
params = yaml.safe_load(fh)
self.input_params.update(params)
log.debug(f"Loaded YAML input params: {params_path}")
@@ -317,7 +316,7 @@ def validate_config_default_parameter(self, param, schema_param, config_default)
param
] = f"String should not be set to `{config_default}`"
if schema_param["type"] == "boolean":
- if not str(config_default) in ["false", "true"]:
+ if str(config_default) not in ["false", "true"]:
self.invalid_nextflow_config_default_parameters[
param
] = f"Booleans should only be true or false, not `{config_default}`"
@@ -634,7 +633,7 @@ def build_schema(self, pipeline_dir, no_prompts, web_only, url):
# Extra help for people running offline
if "Could not connect" in e.args[0]:
log.info(
- "If you're working offline, now copy your schema ({}) and paste at https://nf-co.re/pipeline_schema_builder".format(
+ "If you're working offline, now copy your schema ({}) and paste at https://oldsite.nf-co.re/pipeline_schema_builder".format(
self.schema_filename
)
)
@@ -757,9 +756,7 @@ def prompt_remove_schema_notfound_config(self, p_key):
if self.no_prompts or self.schema_from_scratch:
return True
if Confirm.ask(
- ":question: Unrecognised [bold]'params.{}'[/] found in the schema but not in the pipeline config! [yellow]Remove it?".format(
- p_key
- )
+ f":question: Unrecognised [bold]'params.{p_key}'[/] found in the schema but not in the pipeline config! [yellow]Remove it?"
):
return True
return False
@@ -856,6 +853,7 @@ def launch_web_builder(self):
"""
Send pipeline schema to web builder and wait for response
"""
+
content = {
"post_content": "json_schema",
"api": "true",
@@ -864,6 +862,7 @@ def launch_web_builder(self):
"schema": json.dumps(self.schema),
}
web_response = nf_core.utils.poll_nfcore_web_api(self.web_schema_build_url, content)
+
try:
if "api_url" not in web_response:
raise AssertionError('"api_url" not in web_response')
diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py
index ffba41f9da..3a87190422 100644
--- a/nf_core/subworkflows/lint/__init__.py
+++ b/nf_core/subworkflows/lint/__init__.py
@@ -6,7 +6,6 @@
nf-core subworkflows lint
"""
-from __future__ import print_function
import logging
import os
@@ -16,7 +15,7 @@
import nf_core.modules.modules_utils
import nf_core.utils
-from nf_core.components.lint import ComponentLint, LintException, LintResult
+from nf_core.components.lint import ComponentLint, LintExceptionError, LintResult
from nf_core.lint_utils import console
log = logging.getLogger(__name__)
@@ -114,11 +113,11 @@ def lint(
# Only lint the given module
if subworkflow:
if all_subworkflows:
- raise LintException("You cannot specify a tool and request all tools to be linted.")
+ raise LintExceptionError("You cannot specify a tool and request all tools to be linted.")
local_subworkflows = []
remote_subworkflows = [s for s in self.all_remote_components if s.component_name == subworkflow]
if len(remote_subworkflows) == 0:
- raise LintException(f"Could not find the specified subworkflow: '{subworkflow}'")
+ raise LintExceptionError(f"Could not find the specified subworkflow: '{subworkflow}'")
else:
local_subworkflows = self.all_local_components
remote_subworkflows = self.all_remote_components
diff --git a/nf_core/subworkflows/lint/main_nf.py b/nf_core/subworkflows/lint/main_nf.py
index c7ce77490d..f59e1e4279 100644
--- a/nf_core/subworkflows/lint/main_nf.py
+++ b/nf_core/subworkflows/lint/main_nf.py
@@ -32,7 +32,7 @@ def main_nf(_, subworkflow):
if lines is None:
try:
# Check whether file exists and load it
- with open(subworkflow.main_nf, "r") as fh:
+ with open(subworkflow.main_nf) as fh:
lines = fh.readlines()
subworkflow.passed.append(("main_nf_exists", "Subworkflow file exists", subworkflow.main_nf))
except FileNotFoundError:
@@ -45,30 +45,30 @@ def main_nf(_, subworkflow):
subworkflow_lines = []
workflow_lines = []
main_lines = []
- for l in lines:
- if re.search(r"^\s*workflow\s*\w*\s*{", l) and state == "subworkflow":
+ for line in lines:
+ if re.search(r"^\s*workflow\s*\w*\s*{", line) and state == "subworkflow":
state = "workflow"
- if re.search(r"take\s*:", l) and state in ["workflow"]:
+ if re.search(r"take\s*:", line) and state in ["workflow"]:
state = "take"
continue
- if re.search(r"main\s*:", l) and state in ["take", "workflow"]:
+ if re.search(r"main\s*:", line) and state in ["take", "workflow"]:
state = "main"
continue
- if re.search(r"emit\s*:", l) and state in ["take", "main", "workflow"]:
+ if re.search(r"emit\s*:", line) and state in ["take", "main", "workflow"]:
state = "emit"
continue
# Perform state-specific linting checks
- if state == "subworkflow" and not _is_empty(l):
- subworkflow_lines.append(l)
- if state == "workflow" and not _is_empty(l):
- workflow_lines.append(l)
- if state == "take" and not _is_empty(l):
- inputs.extend(_parse_input(subworkflow, l))
- if state == "emit" and not _is_empty(l):
- outputs.extend(_parse_output(subworkflow, l))
- if state == "main" and not _is_empty(l):
- main_lines.append(l)
+ if state == "subworkflow" and not _is_empty(line):
+ subworkflow_lines.append(line)
+ if state == "workflow" and not _is_empty(line):
+ workflow_lines.append(line)
+ if state == "take" and not _is_empty(line):
+ inputs.extend(_parse_input(subworkflow, line))
+ if state == "emit" and not _is_empty(line):
+ outputs.extend(_parse_output(subworkflow, line))
+ if state == "main" and not _is_empty(line):
+ main_lines.append(line)
# Check that we have required sections
if not len(outputs):
@@ -177,9 +177,9 @@ def check_subworkflow_section(self, lines):
)
includes = []
- for l in lines:
- if l.strip().startswith("include"):
- component_name = l.split("{")[1].split("}")[0].strip()
+ for line in lines:
+ if line.strip().startswith("include"):
+ component_name = line.split("{")[1].split("}")[0].strip()
if " as " in component_name:
component_name = component_name.split(" as ")[1].strip()
includes.append(component_name)
diff --git a/nf_core/subworkflows/lint/meta_yml.py b/nf_core/subworkflows/lint/meta_yml.py
index 4944b26188..24e75eddbf 100644
--- a/nf_core/subworkflows/lint/meta_yml.py
+++ b/nf_core/subworkflows/lint/meta_yml.py
@@ -26,7 +26,7 @@ def meta_yml(subworkflow_lint_object, subworkflow):
"""
# Read the meta.yml file
try:
- with open(subworkflow.meta_yml, "r") as fh:
+ with open(subworkflow.meta_yml) as fh:
meta_yaml = yaml.safe_load(fh)
subworkflow.passed.append(("meta_yml_exists", "Subworkflow `meta.yml` exists", subworkflow.meta_yml))
except FileNotFoundError:
@@ -36,9 +36,7 @@ def meta_yml(subworkflow_lint_object, subworkflow):
# Confirm that the meta.yml file is valid according to the JSON schema
valid_meta_yml = True
try:
- with open(
- Path(subworkflow_lint_object.modules_repo.local_repo_dir, "subworkflows/yaml-schema.json"), "r"
- ) as fh:
+ with open(Path(subworkflow_lint_object.modules_repo.local_repo_dir, "subworkflows/yaml-schema.json")) as fh:
schema = json.load(fh)
jsonschema.validators.validate(instance=meta_yaml, schema=schema)
subworkflow.passed.append(("meta_yml_valid", "Subworkflow `meta.yml` is valid", subworkflow.meta_yml))
@@ -118,7 +116,7 @@ def meta_yml(subworkflow_lint_object, subworkflow):
subworkflow.failed.append(
(
"meta_modules_deprecated",
- f"Deprecated section 'modules' found in `meta.yml`, use 'components' instead",
+ "Deprecated section 'modules' found in `meta.yml`, use 'components' instead",
subworkflow.meta_yml,
)
)
@@ -126,7 +124,7 @@ def meta_yml(subworkflow_lint_object, subworkflow):
subworkflow.passed.append(
(
"meta_modules_deprecated",
- f"Deprecated section 'modules' not found in `meta.yml`",
+ "Deprecated section 'modules' not found in `meta.yml`",
subworkflow.meta_yml,
)
)
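
The dropped `f` prefixes here and in `main_nf.py` above come from pyflakes' F541: an f-string with no placeholders is just a plain string with extra cost. A two-line contrast:

```python
section = "modules"

plain = "Deprecated section 'modules' not found in `meta.yml`"      # no f needed
formatted = f"Deprecated section '{section}' found in `meta.yml`"   # f earns its keep

print(plain)
print(formatted)
```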
diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py
index 1ebced6d42..f7284320ea 100644
--- a/nf_core/subworkflows/lint/subworkflow_tests.py
+++ b/nf_core/subworkflows/lint/subworkflow_tests.py
@@ -52,14 +52,14 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent):
)
if subworkflow.nftest_main_nf.is_file():
- with open(subworkflow.nftest_main_nf, "r") as fh:
+ with open(subworkflow.nftest_main_nf) as fh:
            # Check if main.nf.test.snap file exists, if 'snapshot(' is inside main.nf.test
if "snapshot(" in fh.read():
snap_file = subworkflow.nftest_testdir / "main.nf.test.snap"
if snap_file.is_file():
subworkflow.passed.append(("test_snapshot_exists", "test `main.nf.test.snap` exists", snap_file))
# Validate no empty files
- with open(snap_file, "r") as snap_fh:
+ with open(snap_file) as snap_fh:
try:
snap_content = json.load(snap_fh)
for test_name in snap_content.keys():
@@ -158,7 +158,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent):
pytest_yml_path = subworkflow.base_dir / "tests" / "config" / "pytest_modules.yml"
if pytest_yml_path.is_file() and not is_pytest:
try:
- with open(pytest_yml_path, "r") as fh:
+ with open(pytest_yml_path) as fh:
pytest_yml = yaml.safe_load(fh)
if "subworkflows/" + subworkflow.component_name in pytest_yml.keys():
subworkflow.failed.append(
@@ -178,7 +178,7 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent):
if subworkflow.tags_yml.is_file():
# Check tags.yml exists and it has the correct entry
subworkflow.passed.append(("test_tags_yml_exists", "file `tags.yml` exists", subworkflow.tags_yml))
- with open(subworkflow.tags_yml, "r") as fh:
+ with open(subworkflow.tags_yml) as fh:
tags_yml = yaml.safe_load(fh)
if "subworkflows/" + subworkflow.component_name in tags_yml.keys():
subworkflow.passed.append(("test_tags_yml", "correct entry in tags.yml", subworkflow.tags_yml))
diff --git a/nf_core/sync.py b/nf_core/sync.py
index 5402a6121d..995baeacd2 100644
--- a/nf_core/sync.py
+++ b/nf_core/sync.py
@@ -23,13 +23,13 @@
log = logging.getLogger(__name__)
-class SyncException(Exception):
+class SyncExceptionError(Exception):
"""Exception raised when there was an error with TEMPLATE branch synchronisation"""
pass
-class PullRequestException(Exception):
+class PullRequestExceptionError(Exception):
"""Exception raised when there was an error creating a Pull-Request on GitHub.com"""
pass
@@ -96,7 +96,7 @@ def __init__(
default=False,
).unsafe_ask()
if overwrite_template or "template" not in self.config_yml:
- with open(template_yaml_path, "r") as f:
+ with open(template_yaml_path) as f:
self.config_yml["template"] = yaml.safe_load(f)
with open(self.config_yml_path, "w") as fh:
yaml.safe_dump(self.config_yml, fh)
@@ -138,20 +138,20 @@ def sync(self):
try:
# Check that we have an API auth token
if os.environ.get("GITHUB_AUTH_TOKEN", "") == "":
- raise PullRequestException("GITHUB_AUTH_TOKEN not set!")
+ raise PullRequestExceptionError("GITHUB_AUTH_TOKEN not set!")
# Check that we know the github username and repo name
if self.gh_username is None and self.gh_repo is None:
- raise PullRequestException("Could not find GitHub username and repo name")
+ raise PullRequestExceptionError("Could not find GitHub username and repo name")
self.push_template_branch()
self.create_merge_base_branch()
self.push_merge_branch()
self.make_pull_request()
self.close_open_template_merge_prs()
- except PullRequestException as e:
+ except PullRequestExceptionError as e:
self.reset_target_dir()
- raise PullRequestException(e)
+ raise PullRequestExceptionError(e)
self.reset_target_dir()
@@ -170,7 +170,7 @@ def inspect_sync_dir(self):
try:
self.repo = git.Repo(self.pipeline_dir)
except InvalidGitRepositoryError:
- raise SyncException(f"'{self.pipeline_dir}' does not appear to be a git repository")
+ raise SyncExceptionError(f"'{self.pipeline_dir}' does not appear to be a git repository")
# get current branch so we can switch back later
self.original_branch = self.repo.active_branch.name
@@ -178,7 +178,7 @@ def inspect_sync_dir(self):
# Check to see if there are uncommitted changes on current branch
if self.repo.is_dirty(untracked_files=True):
- raise SyncException(
+ raise SyncExceptionError(
"Uncommitted changes found in pipeline directory!\nPlease commit these before running nf-core sync"
)
@@ -192,7 +192,7 @@ def get_wf_config(self):
log.info(f"Checking out workflow branch '{self.from_branch}'")
self.repo.git.checkout(self.from_branch)
except GitCommandError:
- raise SyncException(f"Branch `{self.from_branch}` not found!")
+ raise SyncExceptionError(f"Branch `{self.from_branch}` not found!")
# If not specified, get the name of the active branch
if not self.from_branch:
@@ -208,7 +208,7 @@ def get_wf_config(self):
# Check that we have the required variables
for rvar in self.required_config_vars:
if rvar not in self.wf_config:
- raise SyncException(f"Workflow config variable `{rvar}` not found!")
+ raise SyncExceptionError(f"Workflow config variable `{rvar}` not found!")
def checkout_template_branch(self):
"""
@@ -223,7 +223,7 @@ def checkout_template_branch(self):
try:
self.repo.git.checkout("TEMPLATE")
except GitCommandError:
- raise SyncException("Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'")
+ raise SyncExceptionError("Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'")
def delete_template_branch_files(self):
"""
@@ -242,7 +242,7 @@ def delete_template_branch_files(self):
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except Exception as e:
- raise SyncException(e)
+ raise SyncExceptionError(e)
def make_template_pipeline(self):
"""
@@ -272,7 +272,7 @@ def make_template_pipeline(self):
except Exception as err:
# Reset to where you were to prevent git getting messed up.
self.repo.git.reset("--hard")
- raise SyncException(f"Failed to rebuild pipeline from template with error:\n{err}")
+ raise SyncExceptionError(f"Failed to rebuild pipeline from template with error:\n{err}")
def commit_template_changes(self):
"""If we have any changes with the new template files, make a git commit"""
@@ -287,7 +287,7 @@ def commit_template_changes(self):
self.made_changes = True
log.info("Committed changes to 'TEMPLATE' branch")
except Exception as e:
- raise SyncException(f"Could not commit changes to TEMPLATE:\n{e}")
+ raise SyncExceptionError(f"Could not commit changes to TEMPLATE:\n{e}")
return True
def push_template_branch(self):
@@ -299,7 +299,7 @@ def push_template_branch(self):
try:
self.repo.git.push()
except GitCommandError as e:
- raise PullRequestException(f"Could not push TEMPLATE branch:\n {e}")
+ raise PullRequestExceptionError(f"Could not push TEMPLATE branch:\n {e}")
def create_merge_base_branch(self):
"""Create a new branch from the updated TEMPLATE branch
@@ -326,7 +326,7 @@ def create_merge_base_branch(self):
try:
self.repo.create_head(self.merge_branch)
except GitCommandError as e:
- raise SyncException(f"Could not create new branch '{self.merge_branch}'\n{e}")
+ raise SyncExceptionError(f"Could not create new branch '{self.merge_branch}'\n{e}")
def push_merge_branch(self):
"""Push the newly created merge branch to the remote repository"""
@@ -335,7 +335,7 @@ def push_merge_branch(self):
origin = self.repo.remote()
origin.push(self.merge_branch)
except GitCommandError as e:
- raise PullRequestException(f"Could not push branch '{self.merge_branch}':\n {e}")
+ raise PullRequestExceptionError(f"Could not push branch '{self.merge_branch}':\n {e}")
def make_pull_request(self):
"""Create a pull request to a base branch (default: dev),
@@ -374,7 +374,7 @@ def make_pull_request(self):
)
except Exception as e:
stderr.print_exception()
- raise PullRequestException(f"Something went badly wrong - {e}")
+ raise PullRequestExceptionError(f"Something went badly wrong - {e}")
else:
self.gh_pr_returned_data = r.json()
self.pr_url = self.gh_pr_returned_data["html_url"]
@@ -395,7 +395,7 @@ def close_open_template_merge_prs(self):
try:
list_prs_json = json.loads(list_prs_request.content)
list_prs_pp = json.dumps(list_prs_json, indent=4)
- except:
+ except Exception:
list_prs_json = list_prs_request.content
list_prs_pp = list_prs_request.content
@@ -438,7 +438,7 @@ def close_open_pr(self, pr):
try:
pr_request_json = json.loads(pr_request.content)
pr_request_pp = json.dumps(pr_request_json, indent=4)
- except:
+ except Exception:
pr_request_json = pr_request.content
pr_request_pp = pr_request.content
@@ -462,4 +462,4 @@ def reset_target_dir(self):
try:
self.repo.git.checkout(self.original_branch)
except GitCommandError as e:
- raise SyncException(f"Could not reset to original branch `{self.original_branch}`:\n{e}")
+ raise SyncExceptionError(f"Could not reset to original branch `{self.original_branch}`:\n{e}")
diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py
index a2107f633c..ac0f467e66 100644
--- a/nf_core/synced_repo.py
+++ b/nf_core/synced_repo.py
@@ -6,7 +6,6 @@
from typing import Dict
import git
-import rich.progress
from git.exc import GitCommandError
from nf_core.utils import load_tools_config
@@ -117,8 +116,6 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa
self.remote_url = remote_url
- self.fullname = nf_core.modules.modules_utils.repo_full_name_from_remote(self.remote_url)
-
self.setup_local_repo(remote_url, branch, hide_progress)
config_fn, repo_config = load_tools_config(self.local_repo_dir)
diff --git a/nf_core/utils.py b/nf_core/utils.py
index bcc8faa3fd..ef70d42b69 100644
--- a/nf_core/utils.py
+++ b/nf_core/utils.py
@@ -4,6 +4,7 @@
import concurrent.futures
import datetime
import errno
+import fnmatch
import hashlib
import io
import json
@@ -40,14 +41,29 @@
[
("qmark", "fg:ansiblue bold"), # token in front of the question
("question", "bold"), # question text
- ("answer", "fg:ansigreen nobold bg:"), # submitted answer text behind the question
- ("pointer", "fg:ansiyellow bold"), # pointer used in select and checkbox prompts
- ("highlighted", "fg:ansiblue bold"), # pointed-at choice in select and checkbox prompts
- ("selected", "fg:ansiyellow noreverse bold"), # style for a selected item of a checkbox
+ (
+ "answer",
+ "fg:ansigreen nobold bg:",
+ ), # submitted answer text behind the question
+ (
+ "pointer",
+ "fg:ansiyellow bold",
+ ), # pointer used in select and checkbox prompts
+ (
+ "highlighted",
+ "fg:ansiblue bold",
+ ), # pointed-at choice in select and checkbox prompts
+ (
+ "selected",
+ "fg:ansiyellow noreverse bold",
+ ), # style for a selected item of a checkbox
("separator", "fg:ansiblack"), # separator in lists
("instruction", ""), # user instructions for select, rawselect, checkbox
("text", ""), # plain text
- ("disabled", "fg:gray italic"), # disabled choices for select and checkbox prompts
+ (
+ "disabled",
+ "fg:gray italic",
+ ), # disabled choices for select and checkbox prompts
("choice-default", "fg:ansiblack"),
("choice-default-changed", "fg:ansiyellow"),
("choice-required", "fg:ansired"),
@@ -58,7 +74,10 @@
os.environ.get("XDG_CACHE_HOME", os.path.join(os.getenv("HOME") or "", ".cache")),
"nfcore",
)
-NFCORE_DIR = os.path.join(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME") or "", ".config")), "nfcore")
+NFCORE_DIR = os.path.join(
+ os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME") or "", ".config")),
+ "nfcore",
+)
def fetch_remote_version(source_url):
@@ -67,7 +86,11 @@ def fetch_remote_version(source_url):
return remote_version
-def check_if_outdated(current_version=None, remote_version=None, source_url="https://nf-co.re/tools_version"):
+def check_if_outdated(
+ current_version=None,
+ remote_version=None,
+ source_url="https://nf-co.re/tools_version",
+):
"""
Check if the current version of nf-core is outdated
"""
@@ -139,7 +162,7 @@ def __init__(self, wf_path):
try:
repo = git.Repo(self.wf_path)
self.git_sha = repo.head.object.hexsha
- except:
+ except Exception:
log.debug(f"Could not find git hash for pipeline: {self.wf_path}")
# Overwrite if we have the last commit from the PR - otherwise we get a merge commit hash
@@ -181,14 +204,14 @@ def _load_pipeline_config(self):
self.pipeline_prefix, self.pipeline_name = self.nf_config.get("manifest.name", "").strip("'").split("/")
- nextflowVersionMatch = re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", ""))
- if nextflowVersionMatch:
- self.minNextflowVersion = nextflowVersionMatch.group(0)
+ nextflow_version_match = re.search(r"[0-9\.]+(-edge)?", self.nf_config.get("manifest.nextflowVersion", ""))
+ if nextflow_version_match:
+ self.minNextflowVersion = nextflow_version_match.group(0)
def _load_conda_environment(self):
"""Try to load the pipeline environment.yml file, if it exists"""
try:
- with open(os.path.join(self.wf_path, "environment.yml"), "r") as fh:
+ with open(os.path.join(self.wf_path, "environment.yml")) as fh:
self.conda_config = yaml.safe_load(fh)
except FileNotFoundError:
log.debug("No conda `environment.yml` file found.")
@@ -215,7 +238,7 @@ def is_pipeline_directory(wf_path):
raise UserWarning(f"'{wf_path}' is not a pipeline - '{fn}' is missing")
-def fetch_wf_config(wf_path, cache_config=True):
+def fetch_wf_config(wf_path: Union[Path, str], cache_config=True) -> dict:
"""Uses Nextflow to retrieve the the configuration variables
from a Nextflow workflow.
@@ -228,20 +251,19 @@ def fetch_wf_config(wf_path, cache_config=True):
"""
log.debug(f"Got '{wf_path}' as path")
-
+ wf_path = Path(wf_path)
config = {}
cache_fn = None
cache_basedir = None
cache_path = None
# Nextflow home directory - use env var if set, or default to ~/.nextflow
- nxf_home = os.environ.get("NXF_HOME", os.path.join(os.getenv("HOME"), ".nextflow"))
+ nxf_home = Path(os.environ.get("NXF_HOME", Path(os.getenv("HOME", ""), ".nextflow")))
# Build a cache directory if we can
- if os.path.isdir(nxf_home):
- cache_basedir = os.path.join(nxf_home, "nf-core")
- if not os.path.isdir(cache_basedir):
- os.mkdir(cache_basedir)
+ if (nxf_home).is_dir():
+ cache_basedir = nxf_home / "nf-core"
+ cache_basedir.mkdir(parents=True, exist_ok=True)
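+ # exist_ok=True means this is safe to call even if the directory already exists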
# If we're given a workflow object with a commit, see if we have a cached copy
cache_fn = None
@@ -249,7 +271,7 @@ def fetch_wf_config(wf_path, cache_config=True):
concat_hash = ""
for fn in ["nextflow.config", "main.nf"]:
try:
- with open(os.path.join(wf_path, fn), "rb") as fh:
+ with open(Path(wf_path, fn), "rb") as fh:
concat_hash += hashlib.sha256(fh.read()).hexdigest()
except FileNotFoundError:
pass
@@ -259,10 +281,10 @@ def fetch_wf_config(wf_path, cache_config=True):
cache_fn = f"wf-config-cache-{bighash[:25]}.json"
if cache_basedir and cache_fn:
- cache_path = os.path.join(cache_basedir, cache_fn)
- if os.path.isfile(cache_path) and cache_config is True:
+ cache_path = Path(cache_basedir, cache_fn)
+ if cache_path.is_file() and cache_config is True:
log.debug(f"Found a config cache, loading: {cache_path}")
- with open(cache_path, "r") as fh:
+ with open(cache_path) as fh:
try:
config = json.load(fh)
except json.JSONDecodeError as e:
@@ -274,8 +296,8 @@ def fetch_wf_config(wf_path, cache_config=True):
result = run_cmd("nextflow", f"config -flat {wf_path}")
if result is not None:
nfconfig_raw, _ = result
- for l in nfconfig_raw.splitlines():
- ul = l.decode("utf-8")
+ for line in nfconfig_raw.splitlines():
+ ul = line.decode("utf-8")
try:
k, v = ul.split(" = ", 1)
config[k] = v.strip("'\"")
@@ -286,9 +308,9 @@ def fetch_wf_config(wf_path, cache_config=True):
# Values in this file are likely to be complex, so don't bother trying to capture them. Just get the param name.
try:
main_nf = os.path.join(wf_path, "main.nf")
- with open(main_nf, "r") as fh:
- for l in fh:
- match = re.match(r"^\s*(params\.[a-zA-Z0-9_]+)\s*=", l)
+ with open(main_nf) as fh:
+ for line in fh:
+ match = re.match(r"^\s*(params\.[a-zA-Z0-9_]+)\s*=", line)
if match:
config[match.group(1)] = "null"
except FileNotFoundError as e:
@@ -312,7 +334,7 @@ def run_cmd(executable: str, cmd: str) -> Union[Tuple[bytes, bytes], None]:
full_cmd = f"{executable} {cmd}"
log.debug(f"Running command: {full_cmd}")
try:
- proc = subprocess.run(shlex.split(full_cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
+ proc = subprocess.run(shlex.split(full_cmd), capture_output=True, check=True)
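+ # capture_output=True is shorthand for stdout=subprocess.PIPE, stderr=subprocess.PIPE (Python 3.7+)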
return (proc.stdout, proc.stderr)
except OSError as e:
if e.errno == errno.ENOENT:
@@ -408,6 +430,7 @@ def poll_nfcore_web_api(api_url, post_data=None):
if post_data is None:
response = requests.get(api_url, headers={"Cache-Control": "no-cache"})
else:
+ log.debug(f"requesting {api_url} with {post_data}")
response = requests.post(url=api_url, data=post_data)
except requests.exceptions.Timeout:
raise AssertionError(f"URL timed out: {api_url}")
@@ -433,7 +456,7 @@ def poll_nfcore_web_api(api_url, post_data=None):
return web_response
-class GitHub_API_Session(requests_cache.CachedSession):
+class GitHubAPISession(requests_cache.CachedSession):
"""
Class to provide a single session for interacting with the GitHub API for a run.
Inherits the requests_cache.CachedSession and adds additional functionality,
@@ -480,10 +503,11 @@ def __call__(self, r):
gh_cli_config_fn = os.path.expanduser("~/.config/gh/hosts.yml")
if self.auth is None and os.path.exists(gh_cli_config_fn):
try:
- with open(gh_cli_config_fn, "r") as fh:
+ with open(gh_cli_config_fn) as fh:
gh_cli_config = yaml.safe_load(fh)
self.auth = requests.auth.HTTPBasicAuth(
- gh_cli_config["github.com"]["user"], gh_cli_config["github.com"]["oauth_token"]
+ gh_cli_config["github.com"]["user"],
+ gh_cli_config["github.com"]["oauth_token"],
)
self.auth_mode = f"gh CLI config: {gh_cli_config['github.com']['user']}"
except Exception:
@@ -590,7 +614,7 @@ def request_retry(self, url, post_data=None):
# Single session object to use for entire codebase. Not sure if there's a better way to do this?
-gh_api = GitHub_API_Session()
+gh_api = GitHubAPISession()
def anaconda_package(dep, dep_channels=None):
@@ -666,18 +690,18 @@ def parse_anaconda_licence(anaconda_response, version=None):
# Clean up / standardise licence names
clean_licences = []
- for l in licences:
- l = re.sub(r"GNU General Public License v\d \(([^\)]+)\)", r"\1", l)
- l = re.sub(r"GNU GENERAL PUBLIC LICENSE", "GPL", l, flags=re.IGNORECASE)
- l = l.replace("GPL-", "GPLv")
- l = re.sub(r"GPL\s*([\d\.]+)", r"GPL v\1", l) # Add v prefix to GPL version if none found
- l = re.sub(r"GPL\s*v(\d).0", r"GPL v\1", l) # Remove superflous .0 from GPL version
- l = re.sub(r"GPL \(([^\)]+)\)", r"GPL \1", l)
- l = re.sub(r"GPL\s*v", "GPL v", l) # Normalise whitespace to one space between GPL and v
- l = re.sub(r"\s*(>=?)\s*(\d)", r" \1\2", l) # Normalise whitespace around >= GPL versions
- l = l.replace("Clause", "clause") # BSD capitilisation
- l = re.sub(r"-only$", "", l) # Remove superflous GPL "only" version suffixes
- clean_licences.append(l)
+ for license in licences:
+ license = re.sub(r"GNU General Public License v\d \(([^\)]+)\)", r"\1", license)
+ license = re.sub(r"GNU GENERAL PUBLIC LICENSE", "GPL", license, flags=re.IGNORECASE)
+ license = license.replace("GPL-", "GPLv")
+ license = re.sub(r"GPL\s*([\d\.]+)", r"GPL v\1", license) # Add v prefix to GPL version if none found
+ license = re.sub(r"GPL\s*v(\d).0", r"GPL v\1", license) # Remove superflous .0 from GPL version
+ license = re.sub(r"GPL \(([^\)]+)\)", r"GPL \1", license)
+ license = re.sub(r"GPL\s*v", "GPL v", license) # Normalise whitespace to one space between GPL and v
+ license = re.sub(r"\s*(>=?)\s*(\d)", r" \1\2", license) # Normalise whitespace around >= GPL versions
+ license = license.replace("Clause", "clause") # BSD capitilisation
+ license = re.sub(r"-only$", "", license) # Remove superflous GPL "only" version suffixes
+ clean_licences.append(license)
return clean_licences
@@ -748,12 +772,18 @@ def get_tag_date(tag_date):
# Obtain version and build
match = re.search(r"(?::)+([A-Za-z\d\-_.]+)", img["image_name"])
if match is not None:
- all_docker[match.group(1)] = {"date": get_tag_date(img["updated"]), "image": img}
+ all_docker[match.group(1)] = {
+ "date": get_tag_date(img["updated"]),
+ "image": img,
+ }
elif img["image_type"] == "Singularity":
# Obtain version and build
match = re.search(r"(?::)+([A-Za-z\d\-_.]+)", img["image_name"])
if match is not None:
- all_singularity[match.group(1)] = {"date": get_tag_date(img["updated"]), "image": img}
+ all_singularity[match.group(1)] = {
+ "date": get_tag_date(img["updated"]),
+ "image": img,
+ }
# Obtain common builds from Docker and Singularity images
common_keys = list(all_docker.keys() & all_singularity.keys())
current_date = None
@@ -792,7 +822,7 @@ def increase_indent(self, flow=False, indentless=False):
See https://github.com/yaml/pyyaml/issues/234#issuecomment-765894586
"""
- return super(CustomDumper, self).increase_indent(flow=flow, indentless=False)
+ return super().increase_indent(flow=flow, indentless=False)
# HACK: insert blank lines between top-level objects
# inspired by https://stackoverflow.com/a/44284819/3786245
@@ -879,13 +909,19 @@ def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False):
# Releases
if len(wf_releases) > 0:
for tag in map(lambda release: release.get("tag_name"), wf_releases):
- tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")]
+ tag_display = [
+ ("fg:ansiblue", f"{tag} "),
+ ("class:choice-default", "[release]"),
+ ]
choices.append(questionary.Choice(title=tag_display, value=tag))
tag_set.append(tag)
# Branches
for branch in wf_branches.keys():
- branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")]
+ branch_display = [
+ ("fg:ansiyellow", f"{branch} "),
+ ("class:choice-default", "[branch]"),
+ ]
choices.append(questionary.Choice(title=branch_display, value=branch))
tag_set.append(branch)
@@ -916,7 +952,8 @@ def validate(self, value):
return True
else:
raise questionary.ValidationError(
- message="Invalid remote cache index file", cursor_position=len(value.text)
+ message="Invalid remote cache index file",
+ cursor_position=len(value.text),
)
else:
return True
@@ -946,7 +983,13 @@ def get_repo_releases_branches(pipeline, wfs):
pipeline = wf.full_name
# Store releases and stop loop
- wf_releases = list(sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True))
+ wf_releases = list(
+ sorted(
+ wf.releases,
+ key=lambda k: k.get("published_at_timestamp", 0),
+ reverse=True,
+ )
+ )
break
# Arbitrary GitHub repo
@@ -966,7 +1009,13 @@ def get_repo_releases_branches(pipeline, wfs):
raise AssertionError(f"Not able to find pipeline '{pipeline}'")
except AttributeError:
# Success! We have a list, which doesn't work with .get() which is looking for a dict key
- wf_releases = list(sorted(rel_r.json(), key=lambda k: k.get("published_at_timestamp", 0), reverse=True))
+ wf_releases = list(
+ sorted(
+ rel_r.json(),
+ key=lambda k: k.get("published_at_timestamp", 0),
+ reverse=True,
+ )
+ )
# Get release tag commit hashes
if len(wf_releases) > 0:
@@ -1025,7 +1074,7 @@ def load_tools_config(directory: Union[str, Path] = "."):
log.debug(f"No tools config file found: {CONFIG_PATHS[0]}")
return Path(directory, CONFIG_PATHS[0]), {}
- with open(config_fn, "r") as fh:
+ with open(config_fn) as fh:
tools_config = yaml.safe_load(fh)
# If the file is empty
@@ -1145,7 +1194,7 @@ def validate_file_md5(file_name, expected_md5hex):
if file_md5hex.upper() == expected_md5hex.upper():
log.debug(f"md5 sum of image matches expected: {expected_md5hex}")
else:
- raise IOError(f"{file_name} md5 does not match remote: {expected_md5hex} - {file_md5hex}")
+ raise OSError(f"{file_name} md5 does not match remote: {expected_md5hex} - {file_md5hex}")
return True
@@ -1193,3 +1242,21 @@ def set_wd(path: Path) -> Generator[None, None, None]:
yield
finally:
os.chdir(start_wd)
+
+
+def get_wf_files(wf_path: Path):
+ """Return a list of all files in a directory (ignores .gitigore files)"""
+
+ wf_files = []
+
+ with open(Path(wf_path, ".gitignore")) as f:
+ lines = f.read().splitlines()
+ ignore = [line for line in lines if line and not line.startswith("#")]
+
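+ # Note: fnmatch tests each pattern against the full path string, which only
+ # approximates gitignore semantics (a directory pattern such as "results/"
+ # will not exclude the files inside that directory).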
+ for path in Path(wf_path).rglob("*"):
+ if any(fnmatch.fnmatch(str(path), pattern) for pattern in ignore):
+ continue
+ if path.is_file():
+ wf_files.append(str(path))
+
+ return wf_files
diff --git a/pyproject.toml b/pyproject.toml
index 2380073107..d75ae89df6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,10 +5,6 @@ requires = [
"wheel"
]
-[tool.black]
-line-length = 120
-target_version = ["py37", "py38", "py39", "py310"]
-
[tool.pytest.ini_options]
markers = [
"datafiles: load datafiles"
@@ -16,7 +12,18 @@ markers = [
testpaths = ["tests"]
norecursedirs = [ ".*", "build", "dist", "*.egg", "data", "__pycache__", ".github", "nf_core", "docs"]
-[tool.isort]
-profile = "black"
-known_first_party = ["nf_core"]
-multi_line_output = 3
+[tool.ruff]
+line-length = 120
+target-version = "py38"
+select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"]
+cache-dir = "~/.cache/ruff"
+
+[tool.ruff.isort]
+known-first-party = ["nf_core"]
+
+[tool.ruff.per-file-ignores]
+"__init__.py" = ["E402", "F401"]
+
+[tool.ruff.lint.pep8-naming]
+extend-ignore-names = ["mocked_*", "*allOf", "*URI*"]
+
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 13dba6f30d..9fbb49c10c 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,15 +1,13 @@
-black
-isort
+mypy
myst_parser
pytest-cov
pytest-datafiles
responses
+ruff
Sphinx
sphinx-rtd-theme
-mypy
-types-PyYAML
-pyupgrade
-types-requests
types-jsonschema
types-Markdown
+types-PyYAML
+types-requests
types-setuptools
diff --git a/requirements.txt b/requirements.txt
index add52f4bc6..526efb7306 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,10 +5,11 @@ jinja2
jsonschema>=3.0
markdown>=3.3
packaging
+pdiff
pre-commit
prompt_toolkit>=3.0.3
-pytest>=7.0.0
pytest-workflow>=1.6.0
+pytest>=7.0.0
pyyaml
questionary>=1.8.0
refgenie
@@ -17,4 +18,4 @@ requests_cache
rich-click>=1.6.1
rich>=13.3.1
tabulate
-pdiff
+trogon
diff --git a/setup.py b/setup.py
index 84c6529dce..057cae202f 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
from setuptools import find_packages, setup
-version = "2.11.1"
+version = "2.12dev"
with open("README.md") as f:
readme = f.read()
diff --git a/tests/components/generate_snapshot.py b/tests/components/generate_snapshot.py
index 46fd63fe3f..c5067d7210 100644
--- a/tests/components/generate_snapshot.py
+++ b/tests/components/generate_snapshot.py
@@ -26,7 +26,7 @@ def test_generate_snapshot_module(self):
snap_path = Path("modules", "nf-core-test", "fastqc", "tests", "main.nf.test.snap")
assert snap_path.exists()
- with open(snap_path, "r") as fh:
+ with open(snap_path) as fh:
snap_content = json.load(fh)
assert "versions" in snap_content
assert "content" in snap_content["versions"]
@@ -48,7 +48,7 @@ def test_generate_snapshot_subworkflow(self):
snap_path = Path("subworkflows", "nf-core-test", "bam_sort_stats_samtools", "tests", "main.nf.test.snap")
assert snap_path.exists()
- with open(snap_path, "r") as fh:
+ with open(snap_path) as fh:
snap_content = json.load(fh)
assert "test_bam_sort_stats_samtools_paired_end_flagstats" in snap_content
assert (
@@ -86,7 +86,7 @@ def test_update_snapshot_module(self):
with set_wd(self.nfcore_modules):
snap_path = Path("modules", "nf-core-test", "bwa", "mem", "tests", "main.nf.test.snap")
- with open(snap_path, "r") as fh:
+ with open(snap_path) as fh:
snap_content = json.load(fh)
original_timestamp = snap_content["Single-End"]["timestamp"]
# delete the timestamp in json
@@ -103,7 +103,7 @@ def test_update_snapshot_module(self):
)
snap_generator.run()
- with open(snap_path, "r") as fh:
+ with open(snap_path) as fh:
snap_content = json.load(fh)
assert "Single-End" in snap_content
assert snap_content["Single-End"]["timestamp"] != original_timestamp
diff --git a/tests/lint/actions_awsfulltest.py b/tests/lint/actions_awsfulltest.py
index 30293e31a4..bbda92a4d1 100644
--- a/tests/lint/actions_awsfulltest.py
+++ b/tests/lint/actions_awsfulltest.py
@@ -19,7 +19,7 @@ def test_actions_awsfulltest_pass(self):
# Edit .github/workflows/awsfulltest.yml to use -profile test_full
new_pipeline = self._make_pipeline_copy()
- with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh:
+ with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh:
awsfulltest_yml = fh.read()
awsfulltest_yml = awsfulltest_yml.replace("-profile test ", "-profile test_full ")
with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh:
@@ -44,7 +44,7 @@ def test_actions_awsfulltest_fail(self):
# Edit .github/workflows/awsfulltest.yml to remove the release trigger
new_pipeline = self._make_pipeline_copy()
- with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "r") as fh:
+ with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml")) as fh:
awsfulltest_yml = yaml.safe_load(fh)
del awsfulltest_yml[True]["release"]
with open(os.path.join(new_pipeline, ".github", "workflows", "awsfulltest.yml"), "w") as fh:
diff --git a/tests/lint/actions_awstest.py b/tests/lint/actions_awstest.py
index 0e19f781aa..7bfa6052f8 100644
--- a/tests/lint/actions_awstest.py
+++ b/tests/lint/actions_awstest.py
@@ -20,7 +20,7 @@ def test_actions_awstest_fail(self):
# Edit .github/workflows/awstest.yml to add a push trigger, which should fail linting
new_pipeline = self._make_pipeline_copy()
- with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh:
+ with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh:
awstest_yml = yaml.safe_load(fh)
awstest_yml[True]["push"] = ["master"]
with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh:
diff --git a/tests/lint/actions_ci.py b/tests/lint/actions_ci.py
index d44dbb73b5..8734b2f78b 100644
--- a/tests/lint/actions_ci.py
+++ b/tests/lint/actions_ci.py
@@ -31,7 +31,7 @@ def test_actions_ci_fail_wrong_trigger(self):
# Edit .github/workflows/ci.yml to mess stuff up!
new_pipeline = self._make_pipeline_copy()
- with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml"), "r") as fh:
+ with open(os.path.join(new_pipeline, ".github", "workflows", "ci.yml")) as fh:
ci_yml = yaml.safe_load(fh)
ci_yml[True]["push"] = ["dev", "patch"]
ci_yml["jobs"]["test"]["strategy"]["matrix"] = {"nxf_versionnn": ["foo", ""]}
diff --git a/tests/lint/actions_schema_validation.py b/tests/lint/actions_schema_validation.py
index 48bb07e4dd..ad65d90018 100644
--- a/tests/lint/actions_schema_validation.py
+++ b/tests/lint/actions_schema_validation.py
@@ -9,7 +9,7 @@ def test_actions_schema_validation_missing_jobs(self):
"""Missing 'jobs' field should result in failure"""
new_pipeline = self._make_pipeline_copy()
- with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh:
+ with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh:
awstest_yml = yaml.safe_load(fh)
awstest_yml.pop("jobs")
with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh:
@@ -27,7 +27,7 @@ def test_actions_schema_validation_missing_on(self):
"""Missing 'on' field should result in failure"""
new_pipeline = self._make_pipeline_copy()
- with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh:
+ with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh:
awstest_yml = yaml.safe_load(fh)
awstest_yml.pop(True)
with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh:
@@ -46,7 +46,7 @@ def test_actions_schema_validation_fails_for_additional_property(self):
"""Missing 'jobs' field should result in failure"""
new_pipeline = self._make_pipeline_copy()
- with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "r") as fh:
+ with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml")) as fh:
awstest_yml = yaml.safe_load(fh)
awstest_yml["not_jobs"] = awstest_yml["jobs"]
with open(os.path.join(new_pipeline, ".github", "workflows", "awstest.yml"), "w") as fh:
diff --git a/tests/lint/merge_markers.py b/tests/lint/merge_markers.py
index be0d076757..64a62e25c3 100644
--- a/tests/lint/merge_markers.py
+++ b/tests/lint/merge_markers.py
@@ -7,7 +7,7 @@ def test_merge_markers_found(self):
"""Missing 'jobs' field should result in failure"""
new_pipeline = self._make_pipeline_copy()
- with open(os.path.join(new_pipeline, "main.nf"), "r") as fh:
+ with open(os.path.join(new_pipeline, "main.nf")) as fh:
main_nf_content = fh.read()
main_nf_content = ">>>>>>>\n" + main_nf_content
with open(os.path.join(new_pipeline, "main.nf"), "w") as fh:
diff --git a/tests/lint/multiqc_config.py b/tests/lint/multiqc_config.py
index 446b4378b0..721560ce81 100644
--- a/tests/lint/multiqc_config.py
+++ b/tests/lint/multiqc_config.py
@@ -18,7 +18,7 @@ def test_multiqc_config_exists_ignore(self):
def test_multiqc_config_missing_report_section_order(self):
"""Test that linting fails if the multiqc_config.yml file is missing the report_section_order"""
new_pipeline = self._make_pipeline_copy()
- with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh:
+ with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh:
mqc_yml = yaml.safe_load(fh)
mqc_yml_tmp = mqc_yml
mqc_yml.pop("report_section_order")
@@ -36,7 +36,7 @@ def test_multiqc_config_missing_report_section_order(self):
def test_multiqc_incorrect_export_plots(self):
"""Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots"""
new_pipeline = self._make_pipeline_copy()
- with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh:
+ with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh:
mqc_yml = yaml.safe_load(fh)
mqc_yml_tmp = mqc_yml
mqc_yml["export_plots"] = False
@@ -54,7 +54,7 @@ def test_multiqc_incorrect_export_plots(self):
def test_multiqc_config_report_comment_fail(self):
"""Test that linting fails if the multiqc_config.yml file has an incorrect report_comment"""
new_pipeline = self._make_pipeline_copy()
- with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh:
+ with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh:
mqc_yml = yaml.safe_load(fh)
mqc_yml_tmp = mqc_yml
mqc_yml["report_comment"] = "This is a test"
@@ -73,7 +73,7 @@ def test_multiqc_config_report_comment_fail(self):
def test_multiqc_config_report_comment_release_fail(self):
"""Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version"""
new_pipeline = self._make_pipeline_copy()
- with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh:
+ with open(Path(new_pipeline, "assets", "multiqc_config.yml")) as fh:
mqc_yml = yaml.safe_load(fh)
mqc_yml_tmp = mqc_yml
with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh:
diff --git a/tests/lint/nextflow_config.py b/tests/lint/nextflow_config.py
index 1542b8cf65..5d5f8e7345 100644
--- a/tests/lint/nextflow_config.py
+++ b/tests/lint/nextflow_config.py
@@ -43,7 +43,7 @@ def test_nextflow_config_missing_test_profile_failed(self):
new_pipeline = self._make_pipeline_copy()
# Change the name of the test profile so there is no such profile
nf_conf_file = os.path.join(new_pipeline, "nextflow.config")
- with open(nf_conf_file, "r") as f:
+ with open(nf_conf_file) as f:
content = f.read()
fail_content = re.sub(r"\btest\b", "testfail", content)
with open(nf_conf_file, "w") as f:
diff --git a/tests/modules/bump_versions.py b/tests/modules/bump_versions.py
index 3c19041f63..ce8c6dbe11 100644
--- a/tests/modules/bump_versions.py
+++ b/tests/modules/bump_versions.py
@@ -2,17 +2,16 @@
import re
import pytest
-import yaml
import nf_core.modules
-from nf_core.modules.modules_utils import ModuleException
+from nf_core.modules.modules_utils import ModuleExceptionError
def test_modules_bump_versions_single_module(self):
"""Test updating a single module"""
# Change the bpipe/test version to an older version
env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")
- with open(env_yml_path, "r") as fh:
+ with open(env_yml_path) as fh:
content = fh.read()
new_content = re.sub(r"bioconda::star=\d.\d.\d\D?", r"bioconda::star=2.6.1d", content)
with open(env_yml_path, "w") as fh:
@@ -32,7 +31,7 @@ def test_modules_bump_versions_all_modules(self):
def test_modules_bump_versions_fail(self):
"""Fail updating a module with wrong name"""
version_bumper = nf_core.modules.ModuleVersionBumper(pipeline_dir=self.nfcore_modules)
- with pytest.raises(ModuleException) as excinfo:
+ with pytest.raises(ModuleExceptionError) as excinfo:
version_bumper.bump_versions(module="no/module")
assert "Could not find the specified module:" in str(excinfo.value)
@@ -41,7 +40,7 @@ def test_modules_bump_versions_fail_unknown_version(self):
"""Fail because of an unknown version"""
# Change the bpipe/test version to an older version
env_yml_path = os.path.join(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")
- with open(env_yml_path, "r") as fh:
+ with open(env_yml_path) as fh:
content = fh.read()
new_content = re.sub(r"bioconda::bpipe=\d.\d.\d\D?", r"bioconda::bpipe=xxx", content)
with open(env_yml_path, "w") as fh:
diff --git a/tests/modules/create.py b/tests/modules/create.py
index 74e5ec3896..460a1439cb 100644
--- a/tests/modules/create.py
+++ b/tests/modules/create.py
@@ -1,4 +1,3 @@
-import filecmp
import os
import shutil
from pathlib import Path
@@ -87,9 +86,9 @@ def test_modules_migrate(self, mock_rich_ask):
# Clone modules repo with pytests
shutil.rmtree(self.nfcore_modules)
Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
- with open(module_dir / "main.nf", "r") as fh:
+ with open(module_dir / "main.nf") as fh:
old_main_nf = fh.read()
- with open(module_dir / "meta.yml", "r") as fh:
+ with open(module_dir / "meta.yml") as fh:
old_meta_yml = fh.read()
# Create a module with --migrate-pytest
@@ -97,9 +96,9 @@ def test_modules_migrate(self, mock_rich_ask):
module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True)
module_create.create()
- with open(module_dir / "main.nf", "r") as fh:
+ with open(module_dir / "main.nf") as fh:
new_main_nf = fh.read()
- with open(module_dir / "meta.yml", "r") as fh:
+ with open(module_dir / "meta.yml") as fh:
new_meta_yml = fh.read()
nextflow_config = module_dir / "tests" / "nextflow.config"
diff --git a/tests/modules/lint.py b/tests/modules/lint.py
index a8a775e6f6..a5d8567b76 100644
--- a/tests/modules/lint.py
+++ b/tests/modules/lint.py
@@ -333,7 +333,7 @@ def test_modules_lint_snapshot_file_missing_fail(self):
def test_modules_lint_snapshot_file_not_needed(self):
"""Test linting a module which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file"""
- with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh:
+ with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh:
content = fh.read()
new_content = content.replace("snapshot(", "snap (")
with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh:
@@ -372,7 +372,7 @@ def test_modules_environment_yml_file_sorted_correctly(self):
def test_modules_environment_yml_file_sorted_incorrectly(self):
"""Test linting a module with an incorrectly sorted environment.yml file"""
- with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "r") as fh:
+ with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh:
yaml_content = yaml.safe_load(fh)
# Add a new dependency to the environment.yml file and reverse the order
yaml_content["dependencies"].append("z")
@@ -548,7 +548,7 @@ def test_modules_missing_test_main_nf(self):
def test_modules_missing_required_tag(self):
"""Test linting a module with a missing required tag"""
- with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh:
+ with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")) as fh:
content = fh.read()
new_content = content.replace("modules_nfcore", "foo")
with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh:
@@ -581,7 +581,7 @@ def test_modules_missing_tags_yml(self):
def test_modules_incorrect_tags_yml_key(self):
"""Test linting a module with an incorrect key in tags.yml file"""
- with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh:
+ with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml")) as fh:
content = fh.read()
new_content = content.replace("bpipe/test:", "bpipe_test:")
with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh:
@@ -598,7 +598,7 @@ def test_modules_incorrect_tags_yml_key(self):
def test_modules_incorrect_tags_yml_values(self):
"""Test linting a module with an incorrect path in tags.yml file"""
- with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh:
+ with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml")) as fh:
content = fh.read()
new_content = content.replace("modules/nf-core/bpipe/test/**", "foo")
with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh:
diff --git a/tests/modules/modules_json.py b/tests/modules/modules_json.py
index 63ee4e743d..a054b6b131 100644
--- a/tests/modules/modules_json.py
+++ b/tests/modules/modules_json.py
@@ -17,7 +17,7 @@
def test_get_modules_json(self):
"""Checks that the get_modules_json function returns the correct result"""
mod_json_path = os.path.join(self.pipeline_dir, "modules.json")
- with open(mod_json_path, "r") as fh:
+ with open(mod_json_path) as fh:
try:
mod_json_sb = json.load(fh)
except json.JSONDecodeError as e:
@@ -73,7 +73,7 @@ def test_mod_json_create(self):
def modify_main_nf(path):
"""Modify a file to test patch creation"""
- with open(path, "r") as fh:
+ with open(path) as fh:
lines = fh.readlines()
# Modify $meta.id to $meta.single_end
lines[1] = ' tag "$meta.single_end"\n'
@@ -112,7 +112,7 @@ def test_mod_json_create_with_patch(self):
assert "branch" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"]["nf-core"]["fastqc"]
# Check that fastqc/main.nf maintains the changes
- with open(module_path / "main.nf", "r") as fh:
+ with open(module_path / "main.nf") as fh:
lines = fh.readlines()
assert lines[1] == ' tag "$meta.single_end"\n'
@@ -214,7 +214,7 @@ def test_mod_json_dump(self):
assert os.path.exists(mod_json_path)
# Check that the dump function writes the correct content
- with open(mod_json_path, "r") as f:
+ with open(mod_json_path) as f:
try:
mod_json_new = json.load(f)
except json.JSONDecodeError as e:
diff --git a/tests/modules/patch.py b/tests/modules/patch.py
index 338d890f2f..dc939c7ea7 100644
--- a/tests/modules/patch.py
+++ b/tests/modules/patch.py
@@ -43,7 +43,7 @@ def setup_patch(pipeline_dir, modify_module):
def modify_main_nf(path):
"""Modify a file to test patch creation"""
- with open(path, "r") as fh:
+ with open(path) as fh:
lines = fh.readlines()
# We want a patch file that looks something like:
# - tuple val(meta), path(reads)
@@ -99,7 +99,7 @@ def test_create_patch_change(self):
)
# Check that the correct lines are in the patch file
- with open(module_path / patch_fn, "r") as fh:
+ with open(module_path / patch_fn) as fh:
patch_lines = fh.readlines()
module_relpath = module_path.relative_to(self.pipeline_dir)
assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines, module_relpath / "main.nf"
@@ -157,7 +157,7 @@ def test_create_patch_try_apply_successful(self):
)
# Check that the correct lines are in the patch file
- with open(module_path / patch_fn, "r") as fh:
+ with open(module_path / patch_fn) as fh:
patch_lines = fh.readlines()
module_relpath = module_path.relative_to(self.pipeline_dir)
assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines
@@ -167,7 +167,7 @@ def test_create_patch_try_apply_successful(self):
assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines
# Check that 'main.nf' is updated correctly
- with open(module_path / "main.nf", "r") as fh:
+ with open(module_path / "main.nf") as fh:
main_nf_lines = fh.readlines()
# These lines should have been removed by the patch
assert " tuple val(meta), path(reads)\n" not in main_nf_lines
@@ -258,7 +258,7 @@ def test_create_patch_update_success(self):
), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME)
# Check that the correct lines are in the patch file
- with open(module_path / patch_fn, "r") as fh:
+ with open(module_path / patch_fn) as fh:
patch_lines = fh.readlines()
module_relpath = module_path.relative_to(self.pipeline_dir)
assert f"--- {module_relpath / 'main.nf'}\n" in patch_lines
@@ -268,7 +268,7 @@ def test_create_patch_update_success(self):
assert "+ tuple val(meta), path(reads), path(index)\n" in patch_lines
# Check that 'main.nf' is updated correctly
- with open(module_path / "main.nf", "r") as fh:
+ with open(module_path / "main.nf") as fh:
main_nf_lines = fh.readlines()
# These lines should have been removed by the patch
assert " tuple val(meta), path(reads)\n" not in main_nf_lines
@@ -300,7 +300,7 @@ def test_create_patch_update_fail(self):
)
# Save the file contents for downstream comparison
- with open(module_path / patch_fn, "r") as fh:
+ with open(module_path / patch_fn) as fh:
patch_contents = fh.read()
update_obj = nf_core.modules.ModuleUpdate(
@@ -317,14 +317,14 @@ def test_create_patch_update_fail(self):
temp_module_dir = temp_dir / BISMARK_ALIGN
for file in os.listdir(temp_module_dir):
assert file in os.listdir(module_path)
- with open(module_path / file, "r") as fh:
+ with open(module_path / file) as fh:
installed = fh.read()
- with open(temp_module_dir / file, "r") as fh:
+ with open(temp_module_dir / file) as fh:
shouldbe = fh.read()
assert installed == shouldbe
# Check that the patch file is unaffected
- with open(module_path / patch_fn, "r") as fh:
+ with open(module_path / patch_fn) as fh:
new_patch_contents = fh.read()
assert patch_contents == new_patch_contents
diff --git a/tests/modules/update.py b/tests/modules/update.py
index 399e9cc12c..5208070fa5 100644
--- a/tests/modules/update.py
+++ b/tests/modules/update.py
@@ -345,7 +345,7 @@ def test_update_only_show_differences_when_patch(self, mock_prompt):
# We modify fastqc because it's one of the modules that can be updated and there's another one before it (custom/dumpsoftwareversions)
module_path = Path(self.pipeline_dir, "modules", "nf-core", "fastqc")
main_path = Path(module_path, "main.nf")
- with open(main_path, "r") as fh:
+ with open(main_path) as fh:
lines = fh.readlines()
for line_index in range(len(lines)):
if lines[line_index] == " label 'process_medium'\n":
diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py
index fc628df34f..002b889671 100644
--- a/tests/subworkflows/create.py
+++ b/tests/subworkflows/create.py
@@ -1,4 +1,3 @@
-import filecmp
import os
import shutil
from pathlib import Path
@@ -53,9 +52,9 @@ def test_subworkflows_migrate(self, mock_rich_ask):
# Clone modules repo with pytests
shutil.rmtree(self.nfcore_modules)
Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH)
- with open(subworkflow_dir / "main.nf", "r") as fh:
+ with open(subworkflow_dir / "main.nf") as fh:
old_main_nf = fh.read()
- with open(subworkflow_dir / "meta.yml", "r") as fh:
+ with open(subworkflow_dir / "meta.yml") as fh:
old_meta_yml = fh.read()
# Create a subworkflow with --migrate-pytest
@@ -65,9 +64,9 @@ def test_subworkflows_migrate(self, mock_rich_ask):
)
subworkflow_create.create()
- with open(subworkflow_dir / "main.nf", "r") as fh:
+ with open(subworkflow_dir / "main.nf") as fh:
new_main_nf = fh.read()
- with open(subworkflow_dir / "meta.yml", "r") as fh:
+ with open(subworkflow_dir / "meta.yml") as fh:
new_meta_yml = fh.read()
nextflow_config = subworkflow_dir / "tests" / "nextflow.config"
diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py
index 1380db2260..b53fef7f0e 100644
--- a/tests/subworkflows/lint.py
+++ b/tests/subworkflows/lint.py
@@ -1,4 +1,3 @@
-import os
from pathlib import Path
import pytest
@@ -87,9 +86,7 @@ def test_subworkflows_lint_snapshot_file_missing_fail(self):
def test_subworkflows_lint_snapshot_file_not_needed(self):
"""Test linting a subworkflow which doesn't need a snapshot file by removing the snapshot keyword in the main.nf.test file"""
- with open(
- Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test"), "r"
- ) as fh:
+ with open(Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test")) as fh:
content = fh.read()
new_content = content.replace("snapshot(", "snap (")
with open(
diff --git a/tests/subworkflows/remove.py b/tests/subworkflows/remove.py
index 53a948778b..dec67875bd 100644
--- a/tests/subworkflows/remove.py
+++ b/tests/subworkflows/remove.py
@@ -1,7 +1,5 @@
from pathlib import Path
-from rich.console import Console
-
from nf_core.modules.modules_json import ModulesJson
@@ -18,7 +16,7 @@ def test_subworkflows_remove_subworkflow(self):
bam_sort_stats_samtools_path = Path(subworkflow_path, "bam_sort_stats_samtools")
bam_stats_samtools_path = Path(subworkflow_path, "bam_stats_samtools")
samtools_index_path = Path(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index")
- mod_json_obj = ModulesJson(self.pipeline_dir)
+ ModulesJson(self.pipeline_dir)
mod_json_before = ModulesJson(self.pipeline_dir).get_modules_json()
assert self.subworkflow_remove.remove("bam_sort_stats_samtools")
mod_json_after = ModulesJson(self.pipeline_dir).get_modules_json()
diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py
index 698086e186..32a69ba180 100644
--- a/tests/subworkflows/update.py
+++ b/tests/subworkflows/update.py
@@ -8,7 +8,6 @@
import nf_core.utils
from nf_core.modules.modules_json import ModulesJson
from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE
-from nf_core.modules.remove import ModuleRemove
from nf_core.modules.update import ModuleUpdate
from nf_core.subworkflows.update import SubworkflowUpdate
@@ -73,7 +72,7 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self):
assert update_obj.update("fastq_align_bowtie2") is True
assert cmp_component(tmpdir, sw_path) is True
- with open(patch_path, "r") as fh:
+ with open(patch_path) as fh:
line = fh.readline()
assert line.startswith(
"Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and"
diff --git a/tests/test_download.py b/tests/test_download.py
index 7c9532e977..7f34f7fbc6 100644
--- a/tests/test_download.py
+++ b/tests/test_download.py
@@ -1,7 +1,6 @@
"""Tests for the download subcommand of nf-core tools
"""
-import hashlib
import os
import re
import shutil
@@ -16,9 +15,9 @@
import nf_core.utils
from nf_core.download import ContainerError, DownloadWorkflow, WorkflowRepo
from nf_core.synced_repo import SyncedRepo
-from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, run_cmd
+from nf_core.utils import run_cmd
-from .utils import with_temporary_file, with_temporary_folder
+from .utils import with_temporary_folder
class DownloadTest(unittest.TestCase):
@@ -160,8 +159,8 @@ def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_co
if result is not None:
nfconfig_raw, _ = result
config = {}
- for l in nfconfig_raw.splitlines():
- ul = l.decode("utf-8")
+ for line in nfconfig_raw.splitlines():
+ ul = line.decode("utf-8")
try:
k, v = ul.split(" = ", 1)
config[k] = v.strip("'\"")
@@ -259,7 +258,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p
)
# Pull again, but now the image already exists
- with pytest.raises(ContainerError.ImageExists):
+ with pytest.raises(ContainerError.ImageExistsError):
download_obj.singularity_pull_image(
"hello-world", f"{tmp_dir}/hello-world.sif", None, "docker.io", mock_rich_progress
)
@@ -269,8 +268,8 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p
"docker.io/bschiffthaler/sed", f"{tmp_dir}/sed.sif", None, "docker.io", mock_rich_progress
)
- # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExists is raised before attempting to pull.)
- with pytest.raises(ContainerError.RegistryNotFound):
+ # try to pull from a non-existent registry (the name change to hello-world_new.sif is needed, otherwise ImageExistsError is raised before attempting to pull)
+ with pytest.raises(ContainerError.RegistryNotFoundError):
download_obj.singularity_pull_image(
"hello-world",
f"{tmp_dir}/hello-world_new.sif",
@@ -280,23 +279,23 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p
)
# test Image not found for several registries
- with pytest.raises(ContainerError.ImageNotFound):
+ with pytest.raises(ContainerError.ImageNotFoundError):
download_obj.singularity_pull_image(
"a-container", f"{tmp_dir}/acontainer.sif", None, "quay.io", mock_rich_progress
)
- with pytest.raises(ContainerError.ImageNotFound):
+ with pytest.raises(ContainerError.ImageNotFoundError):
download_obj.singularity_pull_image(
"a-container", f"{tmp_dir}/acontainer.sif", None, "docker.io", mock_rich_progress
)
- with pytest.raises(ContainerError.ImageNotFound):
+ with pytest.raises(ContainerError.ImageNotFoundError):
download_obj.singularity_pull_image(
"a-container", f"{tmp_dir}/acontainer.sif", None, "ghcr.io", mock_rich_progress
)
# test Image not found for absolute URI.
- with pytest.raises(ContainerError.ImageNotFound):
+ with pytest.raises(ContainerError.ImageNotFoundError):
download_obj.singularity_pull_image(
"docker.io/bschiffthaler/nothingtopullhere",
f"{tmp_dir}/nothingtopullhere.sif",
@@ -306,7 +305,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p
)
# Traffic from GitHub Actions to GitHub's Container Registry is unlimited, so no harm should be done here.
- with pytest.raises(ContainerError.InvalidTag):
+ with pytest.raises(ContainerError.InvalidTagError):
download_obj.singularity_pull_image(
"ewels/multiqc:go-rewrite",
f"{tmp_dir}/umi-transfer.sif",
@@ -343,9 +342,8 @@ def test_get_singularity_images(self, tmp_path, mock_fetch_wf_config):
container_library=("mirage-the-imaginative-registry.io", "quay.io", "ghcr.io", "docker.io"),
)
mock_fetch_wf_config.return_value = {
- "process.mapping.container": "helloworld",
- "process.mapping.container": "helloworld",
- "process.mapping.container": "helloooooooworld",
+ "process.helloworld.container": "helloworld",
+ "process.hellooworld.container": "helloooooooworld",
"process.mapping.container": "ewels/multiqc:gorewrite",
}
download_obj.find_container_images("workflow")
diff --git a/tests/test_launch.py b/tests/test_launch.py
index 03c6a8b692..dc8d6b147c 100644
--- a/tests/test_launch.py
+++ b/tests/test_launch.py
@@ -86,7 +86,7 @@ def test_get_pipeline_defaults(self):
self.launcher.get_pipeline_schema()
self.launcher.set_schema_inputs()
assert len(self.launcher.schema_obj.input_params) > 0
- assert self.launcher.schema_obj.input_params["validate_params"] == True
+ assert self.launcher.schema_obj.input_params["validate_params"] is True
@with_temporary_file
def test_get_pipeline_defaults_input_params(self, tmp_file):
@@ -119,12 +119,12 @@ def test_ob_to_questionary_string(self):
@mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}])
def test_prompt_web_gui_true(self, mock_prompt):
"""Check the prompt to launch the web schema or use the cli"""
- assert self.launcher.prompt_web_gui() == True
+ assert self.launcher.prompt_web_gui() is True
@mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Command line"}])
def test_prompt_web_gui_false(self, mock_prompt):
"""Check the prompt to launch the web schema or use the cli"""
- assert self.launcher.prompt_web_gui() == False
+ assert self.launcher.prompt_web_gui() is False
@mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{}])
def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api):
@@ -144,7 +144,7 @@ def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wa
"""Check the code that opens the web browser"""
self.launcher.get_pipeline_schema()
self.launcher.merge_nxf_flag_schema()
- assert self.launcher.launch_web_gui() == None
+ assert self.launcher.launch_web_gui() is None
@mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}])
def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api):
@@ -163,7 +163,7 @@ def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api):
@mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}])
def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api):
"""Test polling the website for a launch response - status waiting_for_user"""
- assert self.launcher.get_web_launch_response() == False
+ assert self.launcher.get_web_launch_response() is False
@mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}])
def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api):
@@ -191,7 +191,7 @@ def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api):
def test_get_web_launch_response_valid(self, mock_poll_nfcore_web_api, mock_sanitise):
"""Test polling the website for a launch response - complete, valid response"""
self.launcher.get_pipeline_schema()
- assert self.launcher.get_web_launch_response() == True
+ assert self.launcher.get_web_launch_response() is True
def test_sanitise_web_response(self):
"""Check that we can properly sanitise results from the web"""
@@ -201,7 +201,7 @@ def test_sanitise_web_response(self):
self.launcher.schema_obj.input_params["max_cpus"] = "12"
self.launcher.sanitise_web_response()
assert "-name" not in self.launcher.nxf_flags
- assert self.launcher.schema_obj.input_params["igenomes_ignore"] == True
+ assert self.launcher.schema_obj.input_params["igenomes_ignore"] is True
assert self.launcher.schema_obj.input_params["max_cpus"] == 12
def test_ob_to_questionary_bool(self):
@@ -216,12 +216,12 @@ def test_ob_to_questionary_bool(self):
assert result["message"] == ""
assert result["choices"] == ["True", "False"]
assert result["default"] == "True"
- assert result["filter"]("True") == True
- assert result["filter"]("true") == True
- assert result["filter"](True) == True
- assert result["filter"]("False") == False
- assert result["filter"]("false") == False
- assert result["filter"](False) == False
+ assert result["filter"]("True") is True
+ assert result["filter"]("true") is True
+ assert result["filter"](True) is True
+ assert result["filter"]("False") is False
+ assert result["filter"]("false") is False
+ assert result["filter"](False) is False
def test_ob_to_questionary_number(self):
"""Check converting a python dict to a pyenquirer format - with enum"""
@@ -234,7 +234,7 @@ def test_ob_to_questionary_number(self):
assert result["validate"]("") is True
assert result["validate"]("123.56.78") == "Must be a number"
assert result["validate"]("123.56sdkfjb") == "Must be a number"
- assert result["filter"]("123.456") == float(123.456)
+ assert result["filter"]("123.456") == 123.456
assert result["filter"]("") == ""
def test_ob_to_questionary_integer(self):
@@ -248,7 +248,7 @@ def test_ob_to_questionary_integer(self):
assert result["validate"]("") is True
assert result["validate"]("123.45") == "Must be an integer"
assert result["validate"]("123.56sdkfjb") == "Must be an integer"
- assert result["filter"]("123") == int(123)
+ assert result["filter"]("123") == 123
assert result["filter"]("") == ""
def test_ob_to_questionary_range(self):
@@ -321,7 +321,7 @@ def test_build_command_params(self):
== f'nextflow run {self.pipeline_dir} -params-file "{os.path.relpath(self.nf_params_fn)}"'
)
# Check saved parameters file
- with open(self.nf_params_fn, "r") as fh:
+ with open(self.nf_params_fn) as fh:
try:
saved_json = json.load(fh)
except json.JSONDecodeError as e:
diff --git a/tests/test_lint.py b/tests/test_lint.py
index b2e7f3b574..32913bda0d 100644
--- a/tests/test_lint.py
+++ b/tests/test_lint.py
@@ -56,9 +56,9 @@ def test_run_linting_function(self):
We don't really check any of this code as it's just a series of function calls
and we're testing each of those individually. This is mostly to check for syntax errors."""
- lint_obj = nf_core.lint.run_linting(self.test_pipeline_dir, False)
+ nf_core.lint.run_linting(self.test_pipeline_dir, False)
- def test_init_PipelineLint(self):
+ def test_init_pipeline_lint(self):
"""Simply create a PipelineLint object.
This checks that all of the lint test imports are working properly,
@@ -134,7 +134,7 @@ def test_json_output(self, tmp_dir):
self.lint_obj._save_json_results(json_fn)
# Load created JSON file and check its contents
- with open(json_fn, "r") as fh:
+ with open(json_fn) as fh:
try:
saved_json = json.load(fh)
except json.JSONDecodeError as e:
diff --git a/tests/test_modules.py b/tests/test_modules.py
index 92c8dfda3f..f7ada2a483 100644
--- a/tests/test_modules.py
+++ b/tests/test_modules.py
@@ -47,7 +47,7 @@ def create_modules_repo_dummy(tmp_dir):
# Remove doi from meta.yml which makes lint fail
meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml")
- with open(meta_yml_path, "r") as fh:
+ with open(meta_yml_path) as fh:
meta_yml = yaml.safe_load(fh)
del meta_yml["tools"][0]["bpipe"]["doi"]
with open(meta_yml_path, "w") as fh:
@@ -60,7 +60,7 @@ def create_modules_repo_dummy(tmp_dir):
# remove "TODO" statements from main.nf
main_nf_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "main.nf")
- with open(main_nf_path, "r") as fh:
+ with open(main_nf_path) as fh:
main_nf = fh.read()
main_nf = main_nf.replace("TODO", "")
with open(main_nf_path, "w") as fh:
@@ -68,7 +68,7 @@ def create_modules_repo_dummy(tmp_dir):
# remove "TODO" statements from main.nf.test
main_nf_test_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test")
- with open(main_nf_test_path, "r") as fh:
+ with open(main_nf_test_path) as fh:
main_nf_test = fh.read()
main_nf_test = main_nf_test.replace("TODO", "")
with open(main_nf_test_path, "w") as fh:
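
All three hunks above make the same UP015-style change: `"r"` (read, text) is already the default mode, so `open(path, "r")` and `open(path)` are identical. A self-contained sketch with a throwaway file (nothing here comes from the repo):

```python
import tempfile

# Write a throwaway file so the read below has something to open.
with tempfile.NamedTemporaryFile("w", suffix=".yml", delete=False) as fh:
    fh.write("tools: []\n")

# Equivalent to open(fh.name, "r"): read-only text mode is the default.
with open(fh.name) as fh2:
    print(fh2.read())
```
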
diff --git a/tests/test_params_file.py b/tests/test_params_file.py
index 824e8fe345..13c82f5188 100644
--- a/tests/test_params_file.py
+++ b/tests/test_params_file.py
@@ -31,7 +31,7 @@ def setup_class(cls):
cls.invalid_template_schema = os.path.join(cls.template_dir, "nextflow_schema_invalid.json")
# Remove the allOf section to make the schema invalid
- with open(cls.template_schema, "r") as fh:
+ with open(cls.template_schema) as fh:
o = json.load(fh)
del o["allOf"]
@@ -49,7 +49,7 @@ def test_build_template(self):
assert os.path.exists(outfile)
- with open(outfile, "r") as fh:
+ with open(outfile) as fh:
out = fh.read()
assert "nf-core/testpipeline" in out
@@ -68,7 +68,7 @@ def test_build_template_file_exists(self, caplog):
# Creates a new empty file
outfile = Path(self.tmp_dir) / "params-file.yml"
- with open(outfile, "w") as fp:
+ with open(outfile, "w"):
pass
res = self.params_template_builder.write_params_file(outfile)
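
The last hunk drops the unused `as fp` alias: the handle is opened purely to create an empty file and is never touched. A sketch of the pattern, with `Path.touch()` shown as an equivalent one-liner (the path is illustrative):

```python
import tempfile
from pathlib import Path

outfile = Path(tempfile.mkdtemp()) / "params-file.yml"  # illustrative path

with open(outfile, "w"):  # opens for writing, then closes: creates an empty file
    pass

outfile.touch()           # equivalent, and arguably clearer
print(outfile.exists())
```
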
diff --git a/tests/test_refgenie.py b/tests/test_refgenie.py
index 73fbcb863f..5440c1c477 100644
--- a/tests/test_refgenie.py
+++ b/tests/test_refgenie.py
@@ -7,8 +7,6 @@
import tempfile
import unittest
-import yaml
-
class TestRefgenie(unittest.TestCase):
"""Class for refgenie tests"""
@@ -26,7 +24,7 @@ def setUp(self):
# avoids adding includeConfig statement to config file outside the current tmpdir
try:
self.NXF_HOME_ORIGINAL = os.environ["NXF_HOME"]
- except:
+ except Exception:
self.NXF_HOME_ORIGINAL = None
os.environ["NXF_HOME"] = self.NXF_HOME
diff --git a/tests/test_schema.py b/tests/test_schema.py
index 105cd9473e..89fcc98b66 100644
--- a/tests/test_schema.py
+++ b/tests/test_schema.py
@@ -305,7 +305,7 @@ def test_build_schema(self):
Build a new schema param from a pipeline
Run code to ensure it doesn't crash. Individual functions tested separately.
"""
- param = self.schema_obj.build_schema(self.template_dir, True, False, None)
+ self.schema_obj.build_schema(self.template_dir, True, False, None)
@with_temporary_folder
def test_build_schema_from_scratch(self, tmp_dir):
@@ -319,7 +319,7 @@ def test_build_schema_from_scratch(self, tmp_dir):
shutil.copytree(self.template_dir, test_pipeline_dir)
os.remove(os.path.join(test_pipeline_dir, "nextflow_schema.json"))
- param = self.schema_obj.build_schema(test_pipeline_dir, True, False, None)
+ self.schema_obj.build_schema(test_pipeline_dir, True, False, None)
@mock.patch("requests.post")
def test_launch_web_builder_timeout(self, mock_post):
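
Both hunks in this file delete a `param = ...` binding whose value is never read, the pattern flagged by F841 (unused local variable). A sketch with a hypothetical stand-in for `build_schema`:

```python
def build_schema():      # hypothetical stand-in for schema_obj.build_schema()
    return {"params": {}}

build_schema()           # after the cleanup: called for its side effects only

result = build_schema()  # keep the binding only when the value is used
assert result == {"params": {}}
```
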
diff --git a/tests/test_sync.py b/tests/test_sync.py
index 597e4375d3..51a27653ab 100644
--- a/tests/test_sync.py
+++ b/tests/test_sync.py
@@ -44,7 +44,7 @@ def tearDown(self):
def test_inspect_sync_dir_notgit(self, tmp_dir):
"""Try syncing an empty directory"""
psync = nf_core.sync.PipelineSync(tmp_dir)
- with pytest.raises(nf_core.sync.SyncException) as exc_info:
+ with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info:
psync.inspect_sync_dir()
assert "does not appear to be a git repository" in exc_info.value.args[0]
@@ -56,7 +56,7 @@ def test_inspect_sync_dir_dirty(self):
# Try to sync, check we halt with the right error
psync = nf_core.sync.PipelineSync(self.pipeline_dir)
try:
- with pytest.raises(nf_core.sync.SyncException) as exc_info:
+ with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info:
psync.inspect_sync_dir()
assert exc_info.value.args[0].startswith("Uncommitted changes found in pipeline directory!")
finally:
@@ -66,7 +66,7 @@ def test_get_wf_config_no_branch(self):
"""Try getting a workflow config when the branch doesn't exist"""
# Try to sync, check we halt with the right error
psync = nf_core.sync.PipelineSync(self.pipeline_dir, from_branch="foo")
- with pytest.raises(nf_core.sync.SyncException) as exc_info:
+ with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info:
psync.inspect_sync_dir()
psync.get_wf_config()
assert exc_info.value.args[0] == "Branch `foo` not found!"
@@ -76,7 +76,7 @@ def test_get_wf_config_missing_required_config(self):
# Try to sync, check we halt with the right error
psync = nf_core.sync.PipelineSync(self.pipeline_dir)
psync.required_config_vars = ["fakethisdoesnotexist"]
- with pytest.raises(nf_core.sync.SyncException) as exc_info:
+ with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info:
psync.inspect_sync_dir()
psync.get_wf_config()
# Check that we did actually get some config back
@@ -99,7 +99,7 @@ def test_checkout_template_branch_no_template(self):
psync.repo.delete_head("TEMPLATE")
- with pytest.raises(nf_core.sync.SyncException) as exc_info:
+ with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info:
psync.checkout_template_branch()
assert exc_info.value.args[0] == "Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'"
@@ -165,7 +165,7 @@ def test_push_template_branch_error(self):
test_fn.touch()
psync.commit_template_changes()
# Try to push changes
- with pytest.raises(nf_core.sync.PullRequestException) as exc_info:
+ with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info:
psync.push_template_branch()
assert exc_info.value.args[0].startswith("Could not push TEMPLATE branch")
@@ -220,7 +220,7 @@ def test_push_merge_branch_without_create_branch(self):
psync.get_wf_config()
psync.repo.create_remote("origin", self.remote_path)
- with pytest.raises(nf_core.sync.PullRequestException) as exc_info:
+ with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info:
psync.push_merge_branch()
assert exc_info.value.args[0].startswith(f"Could not push branch '{psync.merge_branch}'")
@@ -329,7 +329,7 @@ def test_make_pull_request_bad_response(self, mock_post, mock_get):
psync.gh_username = "bad_url"
psync.gh_repo = "bad_url/response"
os.environ["GITHUB_AUTH_TOKEN"] = "test"
- with pytest.raises(nf_core.sync.PullRequestException) as exc_info:
+ with pytest.raises(nf_core.sync.PullRequestExceptionError) as exc_info:
psync.make_pull_request()
assert exc_info.value.args[0].startswith(
"Something went badly wrong - GitHub API PR failed - got return code 404"
@@ -420,6 +420,6 @@ def test_reset_target_dir_fake_branch(self):
psync.original_branch = "fake_branch"
- with pytest.raises(nf_core.sync.SyncException) as exc_info:
+ with pytest.raises(nf_core.sync.SyncExceptionError) as exc_info:
psync.reset_target_dir()
assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`")
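
Every `pytest.raises` in this file tracks a rename in `nf_core/sync.py`: the custom exceptions gain an `Error` suffix, in line with the pep8-naming rule N818 (exception names should end in `Error`). A self-contained sketch of the pattern; the class body and message are invented for illustration:

```python
import pytest


class SyncExceptionError(Exception):
    """Raised when a pipeline directory cannot be synced."""


def inspect_sync_dir():
    raise SyncExceptionError("does not appear to be a git repository")


with pytest.raises(SyncExceptionError) as exc_info:
    inspect_sync_dir()
assert "git repository" in exc_info.value.args[0]
```
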
diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py
index 154a31fca6..c7088b9282 100644
--- a/tests/test_test_utils.py
+++ b/tests/test_test_utils.py
@@ -1,8 +1,5 @@
-import tempfile
from pathlib import Path
-import pytest
-
from .utils import with_temporary_file, with_temporary_folder
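
`tempfile` and `pytest` were imported here but never referenced, so they go, matching F401 (unused import); the same cleanup appears in `tests/utils.py` below. A short sketch, including the explicit-alias idiom that marks a deliberate re-export so the linter keeps it:

```python
from pathlib import Path  # kept: referenced below

print(Path("example.txt").suffix)

# A deliberate re-export is written with an explicit alias so F401 spares it:
# from .utils import with_temporary_file as with_temporary_file
```
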
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 90d1886dbd..3079d75808 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -134,7 +134,7 @@ def test_request_cant_create_cache(self, mock_mkd, mock_exists):
def test_pip_package_pass(self):
result = nf_core.utils.pip_package("multiqc=1.10")
- assert type(result) == dict
+ assert isinstance(result, dict)
@mock.patch("requests.get")
def test_pip_package_timeout(self, mock_get):
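
The `type(result) == dict` assertion becomes `isinstance(result, dict)`, matching E721 (do not compare types): exact-type equality rejects subclasses, which is rarely the intent of such a check. A short sketch; the `OrderedDict` stands in for whatever `pip_package()` actually returns:

```python
from collections import OrderedDict

result = OrderedDict(multiqc="1.10")  # hypothetical return value

print(type(result) == dict)           # False -- exact-type check misses the subclass
print(isinstance(result, dict))       # True  -- OrderedDict subclasses dict
```
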
diff --git a/tests/utils.py b/tests/utils.py
index 198ac3d583..89c1328818 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -5,9 +5,7 @@
import functools
import os
import tempfile
-from contextlib import contextmanager
-from pathlib import Path
-from typing import Any, Callable, Generator, Tuple
+from typing import Any, Callable, Tuple
import responses
@@ -27,7 +25,7 @@
GITLAB_BRANCH_ORG_PATH_BRANCH = "org-path"
GITLAB_BRANCH_TEST_OLD_SHA = "e772abc22c1ff26afdf377845c323172fb3c19ca"
GITLAB_BRANCH_TEST_NEW_SHA = "7d73e21f30041297ea44367f2b4fd4e045c0b991"
-GITLAB_NFTEST_BRANCH = "nf-test-tests"
+GITLAB_NFTEST_BRANCH = "nf-test-tests-self-hosted-runners"
def with_temporary_folder(func: Callable[..., Any]) -> Callable[..., Any]: