diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index 7061294783..8e253bb3eb 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -97,7 +97,7 @@ jobs: run: | pushd nf-core/${{ matrix.pipeline }} - defaultBranch=$(grep -B5 -A5 "nextflowVersion" nextflow.config | grep "defaultBranch" | cut -d"=" -f2) + defaultBranch=$(grep -B5 -A5 "nextflowVersion" nextflow.config | grep "defaultBranch" | cut -d"=" -f2 | sed "s/'//g") if [ -z "$defaultBranch" ]; then defaultBranch="master" fi diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 68a6fa3ed7..1827289b76 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.2 + rev: v0.8.6 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix @@ -19,7 +19,7 @@ repos: alias: ec - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.13.0" + rev: "v1.14.1" hooks: - id: mypy additional_dependencies: diff --git a/CHANGELOG.md b/CHANGELOG.md index 926535077d..1197a379b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,28 @@ # nf-core/tools: Changelog +## [v3.1.2 - Brass Boxfish Patch](https://github.com/nf-core/tools/releases/tag/3.1.2) - [2025-01-20] + +### Template + +- Bump nf-schema to `2.3.0` ([#3401](https://github.com/nf-core/tools/pull/3401)) +- Remove jinja formatting which was deleting line breaks ([#3405](https://github.com/nf-core/tools/pull/3405)) + +### Download + +- Allow `nf-core pipelines download -r` to download commits ([#3374](https://github.com/nf-core/tools/pull/3374)) +- Fix faulty Download Test Action to ensure that setup and test run as one job and on the same runner ([#3389](https://github.com/nf-core/tools/pull/3389)) + +### Modules + +- Fix bump-versions: only append module name if it is a dir and contains `main.nf` ([#3384](https://github.com/nf-core/tools/pull/3384)) + +### General + +- `manifest.author` is 
not required anymore ([#3397](https://github.com/nf-core/tools/pull/3397)) +- Parameters schema validation: allow `oneOf`, `anyOf` and `allOf` with `required` ([#3386](https://github.com/nf-core/tools/pull/3386)) +- Run pre-commit when rendering template for pipelines sync ([#3371](https://github.com/nf-core/tools/pull/3371)) +- Fix sync GHA by removing quotes from parsed branch name ([#3394](https://github.com/nf-core/tools/pull/3394)) + ## [v3.1.1 - Brass Boxfish Patch](https://github.com/nf-core/tools/releases/tag/3.1.1) - [2024-12-20] ### Template diff --git a/Dockerfile b/Dockerfile index f2141145b8..f5a6796a27 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim@sha256:2b0079146a74e23bf4ae8f6a28e1b484c6292f6fb904cbb51825b4a19812fcd8 +FROM python:3.12-slim@sha256:10f3aaab98db50cba827d3b33a91f39dc9ec2d02ca9b85cbc5008220d07b17f3 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py index ecfe5f24ee..0f42d1bcea 100644 --- a/nf_core/modules/modules_utils.py +++ b/nf_core/modules/modules_utils.py @@ -65,19 +65,20 @@ def get_installed_modules(directory: Path, repo_type="modules") -> Tuple[List[st local_modules = sorted([x for x in local_modules if x.endswith(".nf")]) # Get nf-core modules - if os.path.exists(nfcore_modules_dir): - for m in sorted([m for m in os.listdir(nfcore_modules_dir) if not m == "lib"]): - if not os.path.isdir(os.path.join(nfcore_modules_dir, m)): + if nfcore_modules_dir.exists(): + for m in sorted([m for m in nfcore_modules_dir.iterdir() if not m == "lib"]): + if not m.is_dir(): raise ModuleExceptionError( f"File found in '{nfcore_modules_dir}': '{m}'! This directory should only contain module directories." 
) - m_content = os.listdir(os.path.join(nfcore_modules_dir, m)) + m_content = [d.name for d in m.iterdir()] # Not a module, but contains sub-modules if "main.nf" not in m_content: for tool in m_content: - nfcore_modules_names.append(os.path.join(m, tool)) + if (m / tool).is_dir() and "main.nf" in [d.name for d in (m / tool).iterdir()]: + nfcore_modules_names.append(str(Path(m.name, tool))) else: - nfcore_modules_names.append(m) + nfcore_modules_names.append(m.name) # Make full (relative) file paths and create NFCoreComponent objects if local_modules_dir: diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 9db393d9f0..f3a1073843 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -46,6 +46,8 @@ jobs: steps: - name: Check out pipeline code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + with: + fetch-depth: 0 - name: Set up Nextflow uses: nf-core/setup-nextflow@v2 diff --git a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index f270dc5411..977b4255dc 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -35,7 +35,18 @@ jobs: REPOTITLE_LOWERCASE: ${{ steps.get_repo_properties.outputs.REPOTITLE_LOWERCASE }} REPO_BRANCH: ${{ steps.get_repo_properties.outputs.REPO_BRANCH }} steps: - - name: Install Nextflow{% endraw %} + - name: Get the repository name and current branch + id: get_repo_properties + run: | + echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> "$GITHUB_OUTPUT" + echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> "$GITHUB_OUTPUT" + echo "REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> "$GITHUB_OUTPUT{% endraw %}" + + download: + runs-on: ubuntu-latest + needs: configure + steps: + - 
name: Install Nextflow uses: nf-core/setup-nextflow@v2 - name: Disk space cleanup @@ -56,24 +67,13 @@ jobs: python -m pip install --upgrade pip pip install git+https://github.com/nf-core/tools.git@dev - - name: Get the repository name and current branch set as environment variable - id: get_repo_properties - run: | - echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> "$GITHUB_OUTPUT" - echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> "$GITHUB_OUTPUT" - echo "{% raw %}REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> "$GITHUB_OUTPUT" - - name: Make a cache directory for the container images run: | mkdir -p ./singularity_container_images - download: - runs-on: ubuntu-latest - needs: configure - steps: - name: Download the pipeline env: - NXF_SINGULARITY_CACHEDIR: ./singularity_container_images + NXF_SINGULARITY_CACHEDIR: ./singularity_container_images{% raw %} run: | nf-core pipelines download ${{ needs.configure.outputs.REPO_LOWERCASE }} \ --revision ${{ needs.configure.outputs.REPO_BRANCH }} \ @@ -85,7 +85,10 @@ jobs: --download-configuration 'yes' - name: Inspect download - run: tree ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }}{% endraw %}{% if test_config %}{% raw %} + run: tree ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }}{% endraw %} + + - name: Inspect container images + run: tree ./singularity_container_images | tee ./container_initial{% if test_config %}{% raw %} - name: Count the downloaded number of container images id: count_initial @@ -123,7 +126,8 @@ jobs: final_count=${{ steps.count_afterwards.outputs.IMAGE_COUNT_AFTER }} difference=$((final_count - initial_count)) echo "$difference additional container images were \n downloaded at runtime . The pipeline has no support for offline runs!" - tree ./singularity_container_images + tree ./singularity_container_images > ./container_afterwards + diff ./container_initial ./container_afterwards exit 1 else echo "The pipeline can be downloaded successfully!" 
diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index c355fd6129..16da9a4207 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -18,7 +18,7 @@ {%- endif %} -{%- if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) +{% if multiqc %}- [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 4cd41de368..33015da914 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -7,7 +7,7 @@ -{%- else -%} +{% else -%} # {{ name }} @@ -54,7 +54,7 @@ ## Usage > [!NOTE] -> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. {%- if test_config %}Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data.{% endif %} +> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. {% if test_config %}Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data.{% endif %} -{%- if citations %} +{% if citations %} An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file. 
{%- endif %} diff --git a/nf_core/pipeline-template/conf/test.config b/nf_core/pipeline-template/conf/test.config index bea6f670d0..ebe720f295 100644 --- a/nf_core/pipeline-template/conf/test.config +++ b/nf_core/pipeline-template/conf/test.config @@ -27,7 +27,8 @@ params { // TODO nf-core: Give any required params for the test so that command line flags are not needed input = params.pipelines_testdata_base_path + 'viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv' - {% if igenomes -%} + {%- if igenomes -%} + // Genome references genome = 'R64-1-1' {%- endif %} diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 3325af4e06..5bbe47d9d9 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -295,7 +295,7 @@ manifest { {% if nf_schema -%} // Nextflow plugins plugins { - id 'nf-schema@2.1.1' // Validation of pipeline parameters and creation of an input channel from a sample sheet + id 'nf-schema@2.3.0' // Validation of pipeline parameters and creation of an input channel from a sample sheet } validation { diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 4f90ca17f9..86ac022772 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -386,6 +386,10 @@ def render_template(self) -> None: yaml.dump(config_yml.model_dump(exclude_none=True), fh, Dumper=custom_yaml_dumper()) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") + # Run prettier on files for pipelines sync + log.debug("Running prettier on pipeline files") + run_prettier_on_file([str(f) for f in self.outdir.glob("**/*")]) + def fix_linting(self): """ Updates the .nf-core.yml with linting configurations diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 11adebce2c..c0f6e8c2f3 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ 
-374,22 +374,27 @@ def prompt_revision(self) -> None: raise AssertionError(f"No revisions of {self.pipeline} available for download.") def get_revision_hash(self): - """Find specified revision / branch hash""" + """Find specified revision / branch / commit hash""" for revision in self.revision: # revision is a list of strings, but may be of length 1 # Branch if revision in self.wf_branches.keys(): self.wf_sha = {**self.wf_sha, revision: self.wf_branches[revision]} - # Revision else: + # Revision for r in self.wf_revisions: if r["tag_name"] == revision: self.wf_sha = {**self.wf_sha, revision: r["tag_sha"]} break - # Can't find the revisions or branch - throw an error else: + # Commit - full or short hash + if commit_id := nf_core.utils.get_repo_commit(self.pipeline, revision): + self.wf_sha = {**self.wf_sha, revision: commit_id} + continue + + # Can't find the revisions or branch - throw an error log.info( "Available {} revisions: '{}'".format( self.pipeline, @@ -397,7 +402,9 @@ def get_revision_hash(self): ) ) log.info("Available {} branches: '{}'".format(self.pipeline, "', '".join(self.wf_branches.keys()))) - raise AssertionError(f"Not able to find revision / branch '{revision}' for {self.pipeline}") + raise AssertionError( + f"Not able to find revision / branch / commit '{revision}' for {self.pipeline}" + ) # Set the outdir if not self.outdir: @@ -1536,7 +1543,7 @@ def singularity_pull_image( progress.remove_task(task) - def compress_download(self) -> None: + def compress_download(self): """Take the downloaded files and make a compressed .tar.gz archive.""" log.debug(f"Creating archive: {self.output_filename}") diff --git a/nf_core/pipelines/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py index c1c3acd31f..4dcab3b657 100644 --- a/nf_core/pipelines/lint/files_unchanged.py +++ b/nf_core/pipelines/lint/files_unchanged.py @@ -1,6 +1,7 @@ import filecmp import logging import os +import re import shutil import tempfile from pathlib import Path @@ 
-68,7 +69,10 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: could_fix: bool = False # Check that we have the minimum required config - required_pipeline_config = {"manifest.name", "manifest.description", "manifest.author"} + required_pipeline_config = { + "manifest.name", + "manifest.description", + } # TODO: add "manifest.contributors" when minimum nextflow version is >=24.10.0 missing_pipeline_config = required_pipeline_config.difference(self.nf_config) if missing_pipeline_config: return {"ignored": [f"Required pipeline config not found - {missing_pipeline_config}"]} @@ -117,10 +121,15 @@ def files_unchanged(self) -> Dict[str, Union[List[str], bool]]: tmp_dir.mkdir(parents=True) # Create a template.yaml file for the pipeline creation + if "manifest.author" in self.nf_config: + names = self.nf_config["manifest.author"].strip("\"'") + if "manifest.contributors" in self.nf_config: + contributors = self.nf_config["manifest.contributors"] + names = ", ".join(re.findall(r"name:'([^']+)'", contributors)) template_yaml = { "name": short_name, "description": self.nf_config["manifest.description"].strip("\"'"), - "author": self.nf_config["manifest.author"].strip("\"'"), + "author": names, "org": prefix, } diff --git a/nf_core/pipelines/lint_utils.py b/nf_core/pipelines/lint_utils.py index a6b98b1899..d41cf16b12 100644 --- a/nf_core/pipelines/lint_utils.py +++ b/nf_core/pipelines/lint_utils.py @@ -70,6 +70,15 @@ def print_fixes(lint_obj): ) +def check_git_repo() -> bool: + """Check if the current directory is a git repository.""" + try: + subprocess.check_output(["git", "rev-parse", "--is-inside-work-tree"]) + return True + except subprocess.CalledProcessError: + return False + + def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: """Run the pre-commit hook prettier on a file. @@ -80,6 +89,8 @@ def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: If Prettier is not installed, a warning is logged. 
""" + is_git = check_git_repo() + nf_core_pre_commit_config = Path(nf_core.__file__).parent / ".pre-commit-prettier-config.yaml" args = ["pre-commit", "run", "--config", str(nf_core_pre_commit_config), "prettier"] if isinstance(file, List): @@ -87,21 +98,24 @@ def run_prettier_on_file(file: Union[Path, str, List[str]]) -> None: else: args.extend(["--files", str(file)]) - try: - subprocess.run(args, capture_output=True, check=True) - log.debug(f"${subprocess.STDOUT}") - except subprocess.CalledProcessError as e: - if ": SyntaxError: " in e.stdout.decode(): - log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}") - elif "files were modified by this hook" in e.stdout.decode(): - all_lines = [line for line in e.stdout.decode().split("\n")] - files = "\n".join(all_lines[3:]) - log.debug(f"The following files were modified by prettier:\n {files}") - else: - log.warning( - "There was an error running the prettier pre-commit hook.\n" - f"STDOUT: {e.stdout.decode()}\nSTDERR: {e.stderr.decode()}" - ) + if is_git: + try: + proc = subprocess.run(args, capture_output=True, check=True) + log.debug(f"{proc.stdout.decode()}") + except subprocess.CalledProcessError as e: + if ": SyntaxError: " in e.stdout.decode(): + log.critical(f"Can't format {file} because it has a syntax error.\n{e.stdout.decode()}") + elif "files were modified by this hook" in e.stdout.decode(): + all_lines = [line for line in e.stdout.decode().split("\n")] + files = "\n".join(all_lines[3:]) + log.debug(f"The following files were modified by prettier:\n {files}") + else: + log.warning( + "There was an error running the prettier pre-commit hook.\n" + f"STDOUT: {e.stdout.decode()}\nSTDERR: {e.stderr.decode()}" + ) + else: + log.debug("Not in a git repository, skipping pre-commit hook.") def dump_json_with_prettier(file_name, file_content): diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index f87cc7d8d2..b0f9611f1f 100644 --- 
a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -287,7 +287,7 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: try: git_contributors: Set[str] = set() if self.pipeline_obj.repo is None: - log.info("No git repository found. No git contributors will be added as authors.") + log.debug("No git repository found. No git contributors will be added as authors.") return commits_touching_path = list(self.pipeline_obj.repo.iter_commits(paths="main.nf")) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index a08dd0a2d0..b425ec64ed 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -327,11 +327,32 @@ def validate_default_params(self): schema_no_required = copy.deepcopy(self.schema) if "required" in schema_no_required: schema_no_required.pop("required") + for keyword in ["allOf", "anyOf", "oneOf"]: + if keyword in schema_no_required: + for i, kw_content in enumerate(schema_no_required[keyword]): + if "required" in kw_content: + schema_no_required[keyword][i].pop("required") + schema_no_required[keyword] = [ + kw_content for kw_content in schema_no_required[keyword] if kw_content + ] + if not schema_no_required[keyword]: + schema_no_required.pop(keyword) for group_key, group in schema_no_required.get(self.defs_notation, {}).items(): if "required" in group: schema_no_required[self.defs_notation][group_key].pop("required") + for keyword in ["allOf", "anyOf", "oneOf"]: + if keyword in group: + for i, kw_content in enumerate(group[keyword]): + if "required" in kw_content: + schema_no_required[self.defs_notation][group_key][keyword][i].pop("required") + schema_no_required[self.defs_notation][group_key][keyword] = [ + kw_content for kw_content in group[keyword] if kw_content + ] + if not group[keyword]: + schema_no_required[self.defs_notation][group_key].pop(keyword) jsonschema.validate(self.schema_defaults, schema_no_required) except jsonschema.exceptions.ValidationError as e: + 
log.debug(f"Complete error message:\n{e}") raise AssertionError(f"Default parameters are invalid: {e.message}") for param, default in self.schema_defaults.items(): if default in ("null", "", None, "None") or default is False: diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 781b4f5f00..14365da3f8 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -81,7 +81,11 @@ def __init__( self.made_changes = False self.make_pr = make_pr self.gh_pr_returned_data: Dict = {} - self.required_config_vars = ["manifest.name", "manifest.description", "manifest.version", "manifest.author"] + self.required_config_vars = [ + "manifest.name", + "manifest.description", + "manifest.version", + ] # TODO: add "manifest.contributors" when minimum nextflow version is >=24.10.0 self.force_pr = force_pr self.gh_username = gh_username diff --git a/nf_core/utils.py b/nf_core/utils.py index e2b61329cc..27334d473c 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1096,6 +1096,26 @@ def get_repo_releases_branches(pipeline, wfs): return pipeline, wf_releases, wf_branches +def get_repo_commit(pipeline, commit_id): + """Check if the repo contains the requested commit_id, and expand it to long form if necessary. + + Args: + pipeline (str): GitHub repo username/repo + commit_id: The requested commit ID (SHA). It can be in standard long/short form, or any length. 
+ + Returns: + commit_id: String or None + """ + + commit_response = gh_api.get( + f"https://api.github.com/repos/{pipeline}/commits/{commit_id}", headers={"Accept": "application/vnd.github.sha"} + ) + if commit_response.status_code == 200: + return commit_response.text + else: + return None + + CONFIG_PATHS = [".nf-core.yml", ".nf-core.yaml"] DEPRECATED_CONFIG_PATHS = [".nf-core-lint.yml", ".nf-core-lint.yaml"] @@ -1332,8 +1352,10 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] contributors = wf_config["manifest.contributors"] names = re.findall(r"name:'([^']+)'", contributors) author_names = ", ".join(names) - else: + elif "manifest.author" in wf_config: author_names = wf_config["manifest.author"].strip("'\"") + else: + author_names = None if nf_core_yaml_config.template is None: # The .nf-core.yml file did not contain template information nf_core_yaml_config.template = NFCoreTemplateConfig( diff --git a/setup.py b/setup.py index 5617520e91..fb1621adfc 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "3.1.1" +version = "3.1.2" with open("README.md") as f: readme = f.read() diff --git a/tests/modules/test_modules_utils.py b/tests/modules/test_modules_utils.py new file mode 100644 index 0000000000..763725337b --- /dev/null +++ b/tests/modules/test_modules_utils.py @@ -0,0 +1,20 @@ +import nf_core.modules.modules_utils + +from ..test_modules import TestModules + + +class TestModulesUtils(TestModules): + def test_get_installed_modules(self): + """Test getting installed modules""" + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.nfcore_modules) + assert len(nfcore_modules) == 1 + assert nfcore_modules[0].component_name == "bpipe/test" + + def test_get_installed_modules_with_files(self): + """Test getting installed modules. 
When a module contains a file in its directory, it shouldn't be picked up as a tool/subtool""" + # Create a file in the module directory + with open(self.nfcore_modules / "modules" / "nf-core" / "bpipe" / "test_file.txt", "w") as fh: + fh.write("test") + + _, nfcore_modules = nf_core.modules.modules_utils.get_installed_modules(self.nfcore_modules) + assert len(nfcore_modules) == 1 diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index d1e2c41a68..6db7392107 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -81,6 +81,50 @@ def test_get_release_hash_branch(self): == "https://github.com/nf-core/exoseq/archive/819cbac792b76cf66c840b567ed0ee9a2f620db7.zip" ) + def test_get_release_hash_long_commit(self): + wfs = nf_core.pipelines.list.Workflows() + wfs.get_remote_workflows() + # Exoseq pipeline is archived, so `dev` branch should be stable + pipeline = "exoseq" + revision = "819cbac792b76cf66c840b567ed0ee9a2f620db7" + + download_obj = DownloadWorkflow(pipeline=pipeline, revision=revision) + ( + download_obj.pipeline, + download_obj.wf_revisions, + download_obj.wf_branches, + ) = nf_core.utils.get_repo_releases_branches(pipeline, wfs) + download_obj.get_revision_hash() + assert download_obj.wf_sha[download_obj.revision[0]] == revision + assert download_obj.outdir == f"nf-core-exoseq_{revision}" + assert ( + download_obj.wf_download_url[download_obj.revision[0]] + == f"https://github.com/nf-core/exoseq/archive/{revision}.zip" + ) + + def test_get_release_hash_short_commit(self): + wfs = nf_core.pipelines.list.Workflows() + wfs.get_remote_workflows() + # Exoseq pipeline is archived, so `dev` branch should be stable + pipeline = "exoseq" + revision = "819cbac792b76cf66c840b567ed0ee9a2f620db7" + short_rev = revision[:7] + + download_obj = DownloadWorkflow(pipeline="exoseq", revision=short_rev) + ( + download_obj.pipeline, + download_obj.wf_revisions, + download_obj.wf_branches, + ) = 
nf_core.utils.get_repo_releases_branches(pipeline, wfs) + download_obj.get_revision_hash() + print(download_obj) + assert download_obj.wf_sha[download_obj.revision[0]] == revision + assert download_obj.outdir == f"nf-core-exoseq_{short_rev}" + assert ( + download_obj.wf_download_url[download_obj.revision[0]] + == f"https://github.com/nf-core/exoseq/archive/{revision}.zip" + ) + def test_get_release_hash_non_existent_release(self): wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() diff --git a/tests/pipelines/test_schema.py b/tests/pipelines/test_schema.py index ab543d8b90..efc2798969 100644 --- a/tests/pipelines/test_schema.py +++ b/tests/pipelines/test_schema.py @@ -285,6 +285,89 @@ def test_remove_schema_notfound_configs_childschema(self): assert len(params_removed) == 1 assert "foo" in params_removed + def test_validate_defaults(self): + """Test validating default values""" + self.schema_obj.schema = { + "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}}, + "required": ["foo"], + } + self.schema_obj.schema_defaults = {"foo": "foo", "bar": "bar"} + self.schema_obj.no_prompts = True + try: + self.schema_obj.validate_default_params() + except AssertionError: + self.fail("Error validating schema defaults") + + def test_validate_defaults_required(self): + """Test validating default values when required params don't have a default""" + self.schema_obj.schema = { + "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}}, + "required": ["foo"], + } + self.schema_obj.schema_defaults = {} + self.schema_obj.no_prompts = True + try: + self.schema_obj.validate_default_params() + except AssertionError: + self.fail("Error validating schema defaults") + + def test_validate_defaults_required_inside_group(self): + """Test validating default values when required params don't have a default, inside a group""" + self.schema_obj.schema = { + "$defs": { + "subSchemaId": { + "properties": {"foo": {"type": "string"}, "bar": {"type": 
"string"}}, + "required": ["foo"], + }, + } + } + self.schema_obj.schema_defaults = {} + self.schema_obj.no_prompts = True + try: + self.schema_obj.validate_default_params() + except AssertionError: + self.fail("Error validating schema defaults") + + def test_validate_defaults_required_inside_group_with_anyof(self): + """Test validating default values when required params don't have a default, inside a group with anyOf""" + self.schema_obj.schema = { + "$defs": { + "subSchemaId": { + "anyOf": [{"required": ["foo"]}, {"required": ["bar"]}], + "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}}, + }, + } + } + self.schema_obj.schema_defaults = {} + self.schema_obj.no_prompts = True + try: + self.schema_obj.validate_default_params() + except AssertionError: + self.fail("Error validating schema defaults") + + def test_validate_defaults_required_with_anyof(self): + """Test validating default values when required params don't have a default, with anyOf""" + self.schema_obj.schema = { + "properties": {"foo": {"type": "string"}, "bar": {"type": "string"}, "baz": {"type": "string"}}, + "anyOf": [{"required": ["foo"]}, {"required": ["bar"]}], + } + self.schema_obj.schema_defaults = {"baz": "baz"} + self.schema_obj.no_prompts = True + try: + self.schema_obj.validate_default_params() + except AssertionError: + self.fail("Error validating schema defaults") + + def test_validate_defaults_error(self): + """Test validating default raises an exception when a default is not valid""" + self.schema_obj.schema = { + "properties": {"foo": {"type": "string"}}, + } + self.schema_obj.schema_defaults = {"foo": 1} + self.schema_obj.no_prompts = True + with self.assertRaises(AssertionError): + self.schema_obj.validate_default_params() + def test_add_schema_found_configs(self): """Try adding a new parameter to the schema from the config""" self.schema_obj.pipeline_params = {"foo": "bar"} diff --git a/tests/test_utils.py b/tests/test_utils.py index b13c8eb37d..b7761253a3 100644 
--- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -169,6 +169,17 @@ def test_get_repo_releases_branches_not_exists_slash(self): with pytest.raises(AssertionError): nf_core.utils.get_repo_releases_branches("made-up/pipeline", wfs) + def test_get_repo_commit(self): + # The input can be a commit in standard long/short form, but also any length as long as it can be uniquely resolved + revision = "b3e5e3b95aaf01d98391a62a10a3990c0a4de395" + assert nf_core.utils.get_repo_commit("nf-core/methylseq", revision) == revision + assert nf_core.utils.get_repo_commit("nf-core/methylseq", revision[:16]) == revision + assert nf_core.utils.get_repo_commit("nf-core/methylseq", revision[:7]) == revision + assert nf_core.utils.get_repo_commit("nf-core/methylseq", revision[:6]) == revision + assert nf_core.utils.get_repo_commit("nf-core/methylseq", "xyz") is None + assert nf_core.utils.get_repo_commit("made_up_pipeline", "") is None + assert nf_core.utils.get_repo_commit("made-up/pipeline", "") is None + def test_validate_file_md5(self): # MD5(test) = d8e8fca2dc0f896fd7cb4cb0031ba249 test_file = TEST_DATA_DIR / "test.txt"