diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index fc73294b5a..a90c06f9a9 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -142,4 +142,3 @@ To get started: Devcontainer specs: - [DevContainer config](.devcontainer/devcontainer.json) -- [Dockerfile](.devcontainer/Dockerfile) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 1bd2e27fb0..8fdd2bd7e1 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -33,7 +33,7 @@ body: attributes: label: System information description: | - * Nextflow version _(eg. 22.10.1)_ + * Nextflow version _(eg. 23.04.0)_ * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ * OS _(eg. CentOS Linux, macOS, Linux Mint)_ diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index 46152d92a7..654d248f79 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: NXF_VER: - - "22.10.1" + - "23.04.0" - "latest-everything" steps: # Get the repo code diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml new file mode 100644 index 0000000000..62ec65bcd5 --- /dev/null +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -0,0 +1,137 @@ +name: Create a pipeline from a template and test it +on: + push: + branches: + - dev + paths: + - nf_core/pipeline-template/** + pull_request: + release: + types: [published] + +# Cancel if a newer run is started +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + NXF_ANSI_LOG: false + +jobs: + RunTestWorkflow: + runs-on: ubuntu-latest + env: + NXF_ANSI_LOG: false + strategy: + matrix: + TEMPLATE: + - "template_skip_all.yml" + - "template_skip_github_badges.yml" + - "template_skip_igenomes.yml" + - 
"template_skip_ci.yml" + - "template_skip_nf_core_configs.yml" + + steps: + - uses: actions/checkout@v3 + name: Check out source-code repository + + - name: Set up Python 3.8 + uses: actions/setup-python@v4 + with: + python-version: 3.8 + + - name: Install python dependencies + run: | + python -m pip install --upgrade pip + pip install . + + - name: Install Nextflow + uses: nf-core/setup-nextflow@v1 + with: + version: latest-everything + + # Install the Prettier linting tools + - uses: actions/setup-node@v3 + + - name: Install Prettier + run: npm install -g prettier + + # Install the editorconfig linting tools + - name: Install editorconfig-checker + run: npm install -g editorconfig-checker + + # Create template files + - name: Create template skip all (except github) + run: | + printf "prefix: my-prefix\nskip: ['ci', 'github_badges', 'igenomes', 'nf_core_configs']" > template_skip_all.yml + + - name: Create template skip github_badges + run: | + printf "prefix: my-prefix\nskip: github_badges" > template_skip_github_badges.yml + + - name: Create template skip igenomes + run: | + printf "prefix: my-prefix\nskip: igenomes" > template_skip_igenomes.yml + + - name: Create template skip ci + run: | + printf "prefix: my-prefix\nskip: ci" > template_skip_ci.yml + + - name: Create template skip nf_core_configs + run: | + printf "prefix: my-prefix\nskip: nf_core_configs" > template_skip_nf_core_configs.yml + + # Create a pipeline from the template + - name: create a pipeline from the template ${{ matrix.TEMPLATE }} + run: | + nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml ${{ matrix.TEMPLATE }} + + - name: run the pipeline + run: | + nextflow run my-prefix-testpipeline -profile test,docker --outdir ./results + + # Remove results folder before linting + - name: remove results folder + run: | + rm -rf ./results + + # Try syncing it before we change anything + - name: nf-core sync + run: nf-core 
--log-file log.txt sync --dir my-prefix-testpipeline/ --template-yaml ${{ matrix.TEMPLATE }} + + # Run code style linting + - name: Run Prettier --check + run: prettier --check my-prefix-testpipeline + + - name: Run ECLint check + run: editorconfig-checker -exclude README.md $(find my-prefix-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile') + + # Remove TODO statements + - name: remove TODO + run: find my-prefix-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; + + # Replace zenodo.XXXXXX to pass readme linting + - name: replace zenodo.XXXXXX + run: find my-prefix-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; + + # Run nf-core linting + - name: nf-core lint + run: nf-core --log-file log.txt --hide-progress lint --dir my-prefix-testpipeline --fail-warned + + # Run bump-version + - name: nf-core bump-version + run: nf-core --log-file log.txt bump-version --dir my-prefix-testpipeline/ 1.1 + + # Run nf-core linting in release mode + - name: nf-core lint in release mode + run: nf-core --log-file log.txt --hide-progress lint --dir my-prefix-testpipeline --fail-warned --release + + - name: Tar files + run: tar -cvf artifact_files.tar log.txt template_skip*.yml + + - name: Upload log file artifact + if: ${{ always() }} + uses: actions/upload-artifact@v3 + with: + name: nf-core-log-file + path: artifact_files.tar diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 8f3c5fdb47..7cff154a08 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: NXF_VER: - - "22.10.1" + - "23.04.0" - "latest-everything" steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/pytest-frozen-ubuntu-20.04.yml b/.github/workflows/pytest-frozen-ubuntu-20.04.yml index b015376633..5faf8ce605 100644 --- 
a/.github/workflows/pytest-frozen-ubuntu-20.04.yml +++ b/.github/workflows/pytest-frozen-ubuntu-20.04.yml @@ -15,7 +15,7 @@ concurrency: cancel-in-progress: true jobs: - pytest: + pytest-frozen: runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v3 diff --git a/.gitpod.yml b/.gitpod.yml index 263fcc41db..0cc1006299 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -5,6 +5,7 @@ tasks: python -m pip install -e . python -m pip install -r requirements-dev.txt pre-commit install --install-hooks + nextflow self-update vscode: extensions: # based on nf-core.nf-core-extensionpack - codezombiech.gitignore # Language support for .gitignore files diff --git a/CHANGELOG.md b/CHANGELOG.md index 8921d75fea..11b6da2e6b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,73 @@ # nf-core/tools: Changelog +# [v2.9 - Chromium Falcon](https://github.com/nf-core/tools/releases/tag/2.9) + [2023-06-29] + +### Template + +- `params.max_multiqc_email_size` is no longer required ([#2273](https://github.com/nf-core/tools/pull/2273)) +- Remove `cleanup = true` from `test_full.config` in pipeline template ([#2279](https://github.com/nf-core/tools/pull/2279)) +- Fix usage docs for specifying `params.yaml` ([#2279](https://github.com/nf-core/tools/pull/2279)) +- Added stub in modules template ([#2277])(https://github.com/nf-core/tools/pull/2277) [Contributed by @nvnieuwk] +- Move registry definitions out of profile scope ([#2286])(https://github.com/nf-core/tools/pull/2286) +- Remove `aws_tower` profile ([#2287])(https://github.com/nf-core/tools/pull/2287) +- Fixed the Slack report to include the pipeline name ([#2291](https://github.com/nf-core/tools/pull/2291)) +- Fix link in the MultiQC report to point to exact version of output docs ([#2298](https://github.com/nf-core/tools/pull/2298)) +- Updates seqeralabs/action-tower-launch to v2.0.0 ([#2301](https://github.com/nf-core/tools/pull/2301)) +- Remove schema validation from `lib` folder and use Nextflow [nf-validation 
plugin](https://nextflow-io.github.io/nf-validation/) instead ([#1771](https://github.com/nf-core/tools/pull/1771/)) +- Fix parsing of container directive when it is not typical nf-core format ([#2306](https://github.com/nf-core/tools/pull/2306)) +- Add ability to specify custom registry for linting modules, defaults to quay.io ([#2313](https://github.com/nf-core/tools/pull/2313)) +- Add `singularity.registry = 'quay.io'` in pipeline template ([#2305](https://github.com/nf-core/tools/pull/2305)) +- Add `apptainer.registry = 'quay.io'` in pipeline template ([#2352](https://github.com/nf-core/tools/pull/2352)) +- Bump minimum required NF version in pipeline template from `22.10.1` -> `23.04.0` ([#2305](https://github.com/nf-core/tools/pull/2305)) +- Add ability to interpret `docker.registry` from `nextflow.config` file. If not found defaults to quay.io. ([#2318](https://github.com/nf-core/tools/pull/2318)) +- Add functions to dynamically include pipeline tool citations in MultiQC methods description section for better reporting. ([#2326](https://github.com/nf-core/tools/pull/2326)) +- Remove `--tracedir` parameter ([#2290](https://github.com/nf-core/tools/pull/2290)) +- Incorrect config parameter warnings when customising pipeline template ([#2333](https://github.com/nf-core/tools/pull/2333)) +- Use markdown syntax in the description for the meta map channels ([#2358](https://github.com/nf-core/tools/pull/2358)) + +### Download + +- Introduce a `--tower` flag for `nf-core download` to obtain pipelines in an offline format suited for [seqeralabs® Nextflow Tower](https://cloud.tower.nf/) ([#2247](https://github.com/nf-core/tools/pull/2247)). +- Refactored the CLI for `--singularity-cache` in `nf-core download` from a flag to an argument. The prior options were renamed to `amend` (container images are only saved in the `$NXF_SINGULARITY_CACHEDIR`) and `copy` (a copy of the image is saved with the download). 
`remote` was newly introduced and allows to provide a table of contents of a remote cache via an additional argument `--singularity-cache-index` ([#2247](https://github.com/nf-core/tools/pull/2247)). +- Refactored the CLI parameters related to container images. Although downloading other images than those of the Singularity/Apptainer container system is not supported for the time being, a generic name for the parameters seemed preferable. So the new parameter `--singularity-cache-index` introduced in [#2247](https://github.com/nf-core/tools/pull/2247) has been renamed to `--container-cache-index` prior to release ([#2336](https://github.com/nf-core/tools/pull/2336)). +- To address issue [#2311](https://github.com/nf-core/tools/issues/2311), a new parameter `--container-library` was created allowing to specify the container library (registry) from which container images in OCI format (Docker) should be pulled ([#2336](https://github.com/nf-core/tools/pull/2336)). +- Container detection in configs was improved. This allows for DSL2-like container definitions inside the container parameter value provided to process scopes [#2346](https://github.com/nf-core/tools/pull/2346). +- Add apptainer to the list of false positve container strings ([#2353](https://github.com/nf-core/tools/pull/2353)). 
+ +#### Updated CLI parameters + +| Old parameter | New parameter | +| --------------------- | ---------------------------------------------- | +| new parameter | `-d` / `--download-configuration` | +| new parameter | `-t` / `--tower` | +| `-c`/ `--container` | `-s` / `--container-system ` | +| new parameter | `-l` / `--container-library ` | +| `--singularity-cache` | `-u` / `--container-cache-utilisation ` | +| new parameter | `-i` / `--container-cache-index ` | + +_In addition, `-r` / `--revision` has been changed to a parameter that can be provided multiple times so several revisions can be downloaded at once._ + +### Linting + +- Warn if container access is denied ([#2270](https://github.com/nf-core/tools/pull/2270)) +- Error if module container specification has quay.io as prefix when it shouldn't have ([#2278])(https://github.com/nf-core/tools/pull/2278/files) +- Detect if container is 'simple name' and try to contact quay.io server by default ([#2281](https://github.com/nf-core/tools/pull/2281)) +- Warn about null/None/empty default values in `nextflow_schema.json` ([#3328](https://github.com/nf-core/tools/pull/2328)) +- Fix linting when creating a pipeline skipping some parts of the template and add CI test ([#2330](https://github.com/nf-core/tools/pull/2330)) + +### Modules + +- Don't update `modules_json` object if a module is not updated ([#2323](https://github.com/nf-core/tools/pull/2323)) + +### Subworkflows + +### General + +- GitPod base image: Always self-update to the latest version of Nextflow. Add [pre-commit](https://pre-commit.com/) dependency. +- GitPod configs: Update Nextflow as an init task, init pre-commit in pipeline config. 
+- Refgenie: Create `nxf_home/nf-core/refgenie_genomes.config` path if it doesn't exist ([#2312](https://github.com/nf-core/tools/pull/2312)) +- Add CI tests to test running a pipeline whe it's created from a template skipping different areas + # [v2.8 - Ruthenium Monkey](https://github.com/nf-core/tools/releases/tag/2.8) - [2023-04-27] ### Template diff --git a/README.md b/README.md index 0de42e86e8..012e4c4b12 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ A python package with helper tools for the nf-core community. - [`nf-core` tools update](#update-tools) - [`nf-core list` - List available pipelines](#listing-pipelines) - [`nf-core launch` - Run a pipeline with interactive parameter prompts](#launch-a-pipeline) -- [`nf-core download` - Download pipeline for offline use](#downloading-pipelines-for-offline-use) +- [`nf-core download` - Download a pipeline for offline use](#downloading-pipelines-for-offline-use) - [`nf-core licences` - List software licences in a pipeline](#pipeline-software-licences) - [`nf-core create` - Create a new pipeline with the nf-core template](#creating-a-new-pipeline) - [`nf-core lint` - Check pipeline code against nf-core guidelines](#linting-a-workflow) @@ -327,7 +327,7 @@ Each option has a flag, if all are supplied then it will run without any user in working_dir: tmp --> -![`nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -c none`](docs/images/nf-core-download.svg) +![`nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -s none -d`](docs/images/nf-core-download.svg) Once downloaded, you will see something like the following file structure for the downloaded pipeline: @@ -343,18 +343,20 @@ You can run the pipeline by simply providing the directory path for the `workflo nextflow run /path/to/download/nf-core-rnaseq-dev/workflow/ --input mydata.csv --outdir results # usual parameters here ``` -> Note that if you downloaded singularity images, you will need to use `-profile singularity` or have it 
enabled in your config file. +> Note that if you downloaded Singularity container images, you will need to use `-profile singularity` or have it enabled in your config file. ### Downloaded nf-core configs The pipeline files are automatically updated (`params.custom_config_base` is set to `../configs`), so that the local copy of institutional configs are available when running the pipeline. So using `-profile ` should work if available within [nf-core/configs](https://github.com/nf-core/configs). -### Downloading singularity containers +> ⚠️ This option is not available when downloading a pipeline for use with [Nextflow Tower](#adapting-downloads-to-nextflow-tower) because the application manages all configurations separately. -If you're using Singularity, the `nf-core download` command can also fetch the required Singularity container images for you. -To do this, select `singularity` in the prompt or specify `--container singularity` in the command. -Your archive / target output directory will then include three folders: `workflow`, `configs` and also `singularity-containers`. +### Downloading Apptainer containers + +If you're using [Singularity](https://apptainer.org) (Apptainer), the `nf-core download` command can also fetch the required container images for you. +To do this, select `singularity` in the prompt or specify `--container-system singularity` in the command. +Your archive / target output directory will then also include a separate folder `singularity-containers`. The downloaded workflow files are again edited to add the following line to the end of the pipeline's `nextflow.config` file: @@ -372,10 +374,9 @@ We highly recommend setting the `$NXF_SINGULARITY_CACHEDIR` environment variable If found, the tool will fetch the Singularity images to this directory first before copying to the target output archive / directory. 
Any images previously fetched will be found there and copied directly - this includes images that may be shared with other pipelines or previous pipeline version downloads or download attempts. -If you are running the download on the same system where you will be running the pipeline (eg. a shared filesystem where Nextflow won't have an internet connection at a later date), you can choose to _only_ use the cache via a prompt or cli options `--singularity-cache-only` / `--singularity-cache-copy`. +If you are running the download on the same system where you will be running the pipeline (eg. a shared filesystem where Nextflow won't have an internet connection at a later date), you can choose to _only_ use the cache via a prompt or cli options `--container-cache-utilisation amend`. This instructs `nf-core download` to fetch all Singularity images to the `$NXF_SINGULARITY_CACHEDIR` directory but does _not_ copy them to the workflow archive / directory. The workflow config file is _not_ edited. This means that when you later run the workflow, Nextflow will just use the cache folder directly. -This instructs `nf-core download` to fetch all Singularity images to the `$NXF_SINGULARITY_CACHEDIR` directory but does _not_ copy them to the workflow archive / directory. -The workflow config file is _not_ edited. This means that when you later run the workflow, Nextflow will just use the cache folder directly. +If you are downloading a workflow for a different system, you can provide information about the contents of its image cache to `nf-core download`. To avoid unnecessary container image downloads, choose `--container-cache-utilisation remote` and provide a list of already available images as plain text file to `--container-cache-index my_list_of_remotely_available_images.txt`. To generate this list on the remote system, run `find $NXF_SINGULARITY_CACHEDIR -name "*.img" > my_list_of_remotely_available_images.txt`. 
The tool will then only download and copy images into your output directory, which are missing on the remote system. #### How the Singularity image downloads work @@ -386,21 +387,29 @@ The Singularity image download finds containers using two methods: 2. It scrapes any files it finds with a `.nf` file extension in the workflow `modules` directory for lines that look like `container = "xxx"`. This is the typical method for DSL2 pipelines, which have one container per process. -Some DSL2 modules have container addresses for docker (eg. `biocontainers/fastqc:0.11.9--0`) and also URLs for direct downloads of a Singularity continaer (eg. `https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0`). +Some DSL2 modules have container addresses for docker (eg. `biocontainers/fastqc:0.11.9--0`) and also URLs for direct downloads of a Singularity container (eg. `https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0`). Where both are found, the download URL is preferred. Once a full list of containers is found, they are processed in the following order: -1. If the target image already exists, nothing is done (eg. with `$NXF_SINGULARITY_CACHEDIR` and `--singularity-cache-only` specified) -2. If found in `$NXF_SINGULARITY_CACHEDIR` and `--singularity-cache-only` is _not_ specified, they are copied to the output directory +1. If the target image already exists, nothing is done (eg. with `$NXF_SINGULARITY_CACHEDIR` and `--container-cache-utilisation amend` specified) +2. If found in `$NXF_SINGULARITY_CACHEDIR` and `--container-cache-utilisation copy` is specified, they are copied to the output directory 3. If they start with `http` they are downloaded directly within Python (default 4 at a time, you can customise this with `--parallel-downloads`) -4. If they look like a Docker image name, they are fetched using a `singularity pull` command - - This requires Singularity to be installed on the system and is substantially slower +4. 
If they look like a Docker image name, they are fetched using a `singularity pull` command. Choose the container libraries (registries) queried by providing one or multiple `--container-library` parameter(s). For example, if you call `nf-core download` with `-l quay.io -l ghcr.io -l docker.io`, every image will be pulled from `quay.io` unless an error is encountered. Subsequently, `ghcr.io` and then `docker.io` will be queried for any image that has failed before. + - This requires Singularity/Apptainer to be installed on the system and is substantially slower -Note that compressing many GBs of binary files can be slow, so specifying `--compress none` is recommended when downloading Singularity images. +Note that compressing many GBs of binary files can be slow, so specifying `--compress none` is recommended when downloading Singularity images that are copied to the output directory. If the download speeds are much slower than your internet connection is capable of, you can set `--parallel-downloads` to a large number to download loads of images at once. +### Adapting downloads to Nextflow Tower + +[seqeralabs® Nextflow Tower](https://cloud.tower.nf/) provides a graphical user interface to oversee pipeline runs, gather statistics and configure compute resources. While pipelines added to _Tower_ are preferably hosted at a Git service, providing them as disconnected, self-reliant repositories is also possible for premises with restricted network access. Choosing the `--tower` flag will download the pipeline in an appropriate form. + +Subsequently, the `*.git` folder can be moved to it's final destination and linked with a pipeline in _Tower_ using the `file:/` prefix. + +> 💡 Also without access to Tower, pipelines downloaded with the `--tower` flag can be run: `nextflow run -r 2.5 file:/path/to/pipelinedownload.git`. Downloads in this format allow you to include multiple revisions of a pipeline in a single file, but require that the revision (e.g. 
`-r 2.5`) is always explicitly specified. + ## Pipeline software licences Sometimes it's useful to see the software licences of the tools used in a pipeline. @@ -562,7 +571,7 @@ timeout: 10 after_command: rm nf-params.json --> -![`nf-core schema validate nf-core-rnaseq/workflow nf-params.json`](docs/images/nf-core-schema-validate.svg) +![`nf-core schema validate nf-core-rnaseq/3_8 nf-params.json`](docs/images/nf-core-schema-validate.svg) The `pipeline` option can be a directory containing a pipeline, a path to a schema file or the name of an nf-core pipeline (which will be downloaded using `nextflow pull`). @@ -899,6 +908,7 @@ The `nf-core modules create` command will prompt you with the relevant questions @@ -936,6 +946,8 @@ before_command: sed 's/1.13a/1.10/g' modules/multiqc/main.nf > modules/multiqc/m To run unit tests of a module that you have installed or the test created by the command [`nf-core modules create-test-yml`](#create-a-module-test-config-file), you can use `nf-core modules test` command. This command runs the tests specified in `modules/tests/software///test.yml` file using [pytest](https://pytest-workflow.readthedocs.io/en/stable/). +> This command uses the pytest argument `--git-aware` to avoid copying the whole `.git` directory and files ignored by `git`. This means that it will only include files listed by `git ls-files`. Remember to **commit your changes** after adding a new module to add the new files to your git index. + You can specify the module name in the form TOOL/SUBTOOL in command line or provide it later by prompts. 
-![`cd modules && nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force`](docs/images/nf-core-subworkflows-create.svg) +![`nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force`](docs/images/nf-core-subworkflows-create.svg) ### Create a subworkflow test config file @@ -1194,6 +1205,8 @@ extra_env: To run unit tests of a subworkflow that you have installed or the test created by the command [`nf-core subworkflow create-test-yml`](#create-a-subworkflow-test-config-file), you can use `nf-core subworkflows test` command. This command runs the tests specified in `tests/subworkflows//test.yml` file using [pytest](https://pytest-workflow.readthedocs.io/en/stable/). +> This command uses the pytest argument `--git-aware` to avoid copying the whole `.git` directory and files ignored by `git`. This means that it will only include files listed by `git ls-files`. Remember to **commit your changes** after adding a new subworkflow to add the new files to your git index. + You can specify the subworkflow name in the form TOOL/SUBTOOL in command line or provide it later by prompts. - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - + - - $ nf-core bump-version 1.1 - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -INFO     Changing version number from '1.0dev' to '1.1' -INFO     Updated version in 'nextflow.config' - - version         = '1.0dev' - + version = '1.1' - - + + $ nf-core bump-version 1.1 + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +INFO     Changing version number from '1.0dev' to '1.1' +INFO     Updated version in 'nextflow.config' + - version         = '1.0dev' + + version = '1.1' + + +INFO     Updated version in 'assets/multiqc_config.yml' + - This report has been generated by the <a  +href="https://github.com/nf-core/nextbigthing/1.0dev" target="_blank">nf-core/nextbigthing</a> + + This report has been generated by the <a  +href="https://github.com/nf-core/nextbigthing/1.1" target="_blank">nf-core/nextbigthing</a> + - <a href="https://nf-co.re/nextbigthing/1.0dev/output"  +target="_blank">documentation</a>. + + <a href="https://nf-co.re/nextbigthing/1.1/output" target="_blank">documentation</a>. + + diff --git a/docs/images/nf-core-create.svg b/docs/images/nf-core-create.svg index aaae85f91f..c0a7a6db85 100644 --- a/docs/images/nf-core-create.svg +++ b/docs/images/nf-core-create.svg @@ -19,104 +19,104 @@ font-weight: 700; } - .terminal-1755763245-matrix { + .terminal-2004013614-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1755763245-title { + .terminal-2004013614-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1755763245-r1 { fill: #c5c8c6 } -.terminal-1755763245-r2 { fill: #98a84b } -.terminal-1755763245-r3 { fill: #9a9b99 } -.terminal-1755763245-r4 { fill: #608ab1 } -.terminal-1755763245-r5 { fill: #d0b344 } -.terminal-1755763245-r6 { fill: #98729f } -.terminal-1755763245-r7 { fill: #ff2c7a } -.terminal-1755763245-r8 { fill: #98a84b;font-weight: bold } -.terminal-1755763245-r9 { fill: #1984e9;text-decoration: underline; } + .terminal-2004013614-r1 { fill: #c5c8c6 } +.terminal-2004013614-r2 { fill: #98a84b } 
+.terminal-2004013614-r3 { fill: #9a9b99 } +.terminal-2004013614-r4 { fill: #608ab1 } +.terminal-2004013614-r5 { fill: #d0b344 } +.terminal-2004013614-r6 { fill: #98729f } +.terminal-2004013614-r7 { fill: #ff2c7a } +.terminal-2004013614-r8 { fill: #98a84b;font-weight: bold } +.terminal-2004013614-r9 { fill: #1984e9;text-decoration: underline; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -128,34 +128,34 @@ - + - - $ nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique"  --a "Big Steve" --plain - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -INFO     Creating new nf-core pipeline: 'nf-core/nextbigthing' -INFO     Initialising pipeline git repository                                                        -INFO     Done. Remember to add a remote and push to GitHub:                                          - cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing - git remote add origin git@github.com:USERNAME/REPO_NAME.git  - git push --all origin                                        -INFO     This will also push your newly created dev branch and the TEMPLATE branch for syncing.      -INFO    !!!!!! IMPORTANT !!!!!! - -If you are interested in adding your pipeline to the nf-core community, -PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! - -Please read: https://nf-co.re/developers/adding_pipelines#join-the-community + + $ nf-core create -n nextbigthing -d "This pipeline analyses data from the next big omics technique"  +-a "Big Steve" --plain + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +INFO     Creating new nf-core pipeline: 'nf-core/nextbigthing' +INFO     Initialising pipeline git repository                                                        +INFO     Done. Remember to add a remote and push to GitHub:                                          + cd /home/runner/work/tools/tools/tmp/nf-core-nextbigthing + git remote add origin git@github.com:USERNAME/REPO_NAME.git  + git push --all origin                                        +INFO     This will also push your newly created dev branch and the TEMPLATE branch for syncing.      +INFO    !!!!!! IMPORTANT !!!!!! + +If you are interested in adding your pipeline to the nf-core community, +PLEASE COME AND TALK TO US IN THE NF-CORE SLACK BEFORE WRITING ANY CODE! 
+ +Please read: https://nf-co.re/developers/adding_pipelines#join-the-community diff --git a/docs/images/nf-core-download-tree.svg b/docs/images/nf-core-download-tree.svg index a06c743fdf..fc9585c8c9 100644 --- a/docs/images/nf-core-download-tree.svg +++ b/docs/images/nf-core-download-tree.svg @@ -19,123 +19,123 @@ font-weight: 700; } - .terminal-3113317903-matrix { + .terminal-3298203246-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3113317903-title { + .terminal-3298203246-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3113317903-r1 { fill: #c5c8c6 } + .terminal-3298203246-r1 { fill: #c5c8c6 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -147,43 +147,43 @@ - + - - $ tree -L 2 nf-core-rnaseq/ -nf-core-rnaseq/ -├── configs -│   ├── CITATION.cff -│   ├── LICENSE -│   ├── README.md -│   ├── bin -│   ├── conf -│   ├── configtest.nf -│   ├── docs -│   ├── nextflow.config -│   ├── nfcore_custom.config -│   └── pipeline -└── workflow -    ├── CHANGELOG.md -    ├── CITATIONS.md -    ├── CODE_OF_CONDUCT.md -    ├── LICENSE -    ├── README.md -    ├── assets -    ├── bin -    ├── conf -    ├── docs -    ├── lib -    ├── main.nf -    ├── modules -    ├── modules.json -    ├── nextflow.config -    ├── nextflow_schema.json -    ├── subworkflows -    ├── tower.yml -    └── workflows - -14 directories, 16 files + + $ tree -L 2 nf-core-rnaseq/ +nf-core-rnaseq/ +├── 3_8 +│   ├── CHANGELOG.md +│   ├── CITATIONS.md +│   ├── CODE_OF_CONDUCT.md +│   ├── LICENSE +│   ├── README.md +│   ├── assets +│   ├── bin +│   ├── conf +│   ├── docs +│   ├── lib +│   ├── main.nf +│   ├── modules +│   ├── modules.json +│   ├── nextflow.config +│   ├── nextflow_schema.json +│   ├── subworkflows +│   ├── tower.yml +│   └── workflows +└── configs +    ├── CITATION.cff +    ├── LICENSE +    ├── 
README.md +    ├── bin +    ├── conf +    ├── configtest.nf +    ├── docs +    ├── nextflow.config +    ├── nfcore_custom.config +    └── pipeline + +14 directories, 16 files diff --git a/docs/images/nf-core-download.svg b/docs/images/nf-core-download.svg index a52773fd6d..69b0b5b29b 100644 --- a/docs/images/nf-core-download.svg +++ b/docs/images/nf-core-download.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + - + - + - - $ nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -c none - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -INFO     Saving 'nf-core/rnaseq' -          Pipeline revision: '3.8' -          Pull containers: 'none' -          Output directory: 'nf-core-rnaseq' -INFO     Downloading workflow files from GitHub                                                      -INFO     Downloading centralised configs from GitHub                                                 + + $ nf-core download rnaseq -r 3.8 --outdir nf-core-rnaseq -x none -s none -d + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +INFO     Saving 'nf-core/rnaseq' +          Pipeline revision: '3.8' +          Use containers: 'none' +          Container library: 'quay.io' +          Output directory: 'nf-core-rnaseq' +          Include default institutional configuration: 'True' +INFO     Downloading centralised configs from GitHub                                                 +INFO     Downloading workflow files from GitHub                                                      diff --git a/docs/images/nf-core-launch-rnaseq.svg b/docs/images/nf-core-launch-rnaseq.svg index ddeb0f6e7c..8505f47e74 100644 --- a/docs/images/nf-core-launch-rnaseq.svg +++ b/docs/images/nf-core-launch-rnaseq.svg @@ -19,72 +19,72 @@ font-weight: 700; } - .terminal-1252240808-matrix { + .terminal-1328852393-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1252240808-title { + .terminal-1328852393-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1252240808-r1 { fill: #c5c8c6 } -.terminal-1252240808-r2 { fill: #98a84b } -.terminal-1252240808-r3 { fill: #9a9b99 } -.terminal-1252240808-r4 { fill: #608ab1 } -.terminal-1252240808-r5 { fill: #d0b344 } -.terminal-1252240808-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-1252240808-r7 { fill: #68a0b3;font-weight: bold } + .terminal-1328852393-r1 { fill: #c5c8c6 } +.terminal-1328852393-r2 { fill: #98a84b } +.terminal-1328852393-r3 { fill: #9a9b99 } +.terminal-1328852393-r4 { fill: #608ab1 } +.terminal-1328852393-r5 { fill: #d0b344 } +.terminal-1328852393-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-1328852393-r7 { fill: #68a0b3;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + 
- + - + @@ -96,24 +96,24 @@ - + - - $ nf-core launch rnaseq -r 3.8.1 - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -INFO     NOTE: This tool ignores any pipeline parameter defaults overwritten by Nextflow config      -         files or profiles                                                                           - -INFO     Downloading workflow: nf-core/rnaseq (3.8.1) + + $ nf-core launch rnaseq -r 3.8.1 + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +INFO     NOTE: This tool ignores any pipeline parameter defaults overwritten by Nextflow config      +         files or profiles                                                                           + +INFO     Downloading workflow: nf-core/rnaseq (3.8.1) diff --git a/docs/images/nf-core-licences.svg b/docs/images/nf-core-licences.svg index de6dff2e78..9a72e4beda 100644 --- a/docs/images/nf-core-licences.svg +++ b/docs/images/nf-core-licences.svg @@ -19,108 +19,108 @@ font-weight: 700; } - .terminal-2581383703-matrix { + .terminal-2740439576-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2581383703-title { + .terminal-2740439576-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2581383703-r1 { fill: #c5c8c6 } -.terminal-2581383703-r2 { fill: #98a84b } -.terminal-2581383703-r3 { fill: #9a9b99 } -.terminal-2581383703-r4 { fill: #608ab1 } -.terminal-2581383703-r5 { 
fill: #d0b344 } -.terminal-2581383703-r6 { fill: #68a0b3;font-weight: bold } -.terminal-2581383703-r7 { fill: #c5c8c6;font-weight: bold } + .terminal-2740439576-r1 { fill: #c5c8c6 } +.terminal-2740439576-r2 { fill: #98a84b } +.terminal-2740439576-r3 { fill: #9a9b99 } +.terminal-2740439576-r4 { fill: #608ab1 } +.terminal-2740439576-r5 { fill: #d0b344 } +.terminal-2740439576-r6 { fill: #68a0b3;font-weight: bold } +.terminal-2740439576-r7 { fill: #c5c8c6;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -132,36 +132,36 @@ - + - - $ nf-core licences deepvariant - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -INFO     Fetching licence information for 8 tools                                                    -INFO     Warning: This tool only prints licence information for the software tools packaged using    -         conda.                                                                                      -INFO     The pipeline may use other software and dependencies not described here.                    -┏━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┓ -Package NameVersionLicence -┡━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━┩ -│ lbzip2       │ 2.5     │ GPL v3  │ -│ deepvariant  │ 0.7.0   │ MIT     │ -│ htslib       │ 1.9     │ MIT     │ -│ picard       │ 2.18.7  │ MIT     │ -│ pip          │ 10.0.1  │ MIT     │ -│ samtools     │ 1.9     │ MIT     │ -│ python       │ 2.7.15  │ PSF     │ -│ bzip2        │ 1.0.6   │ bzip2   │ -└──────────────┴─────────┴─────────┘ + + $ nf-core licences deepvariant + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +INFO     Fetching licence information for 8 tools                                                    +INFO     Warning: This tool only prints licence information for the software tools packaged using    +         conda.                                                                                      +INFO     The pipeline may use other software and dependencies not described here.                    +┏━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┓ +Package NameVersionLicence +┡━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━┩ +│ lbzip2       │ 2.5     │ GPL v3  │ +│ deepvariant  │ 0.7.0   │ MIT     │ +│ htslib       │ 1.9     │ MIT     │ +│ picard       │ 2.18.7  │ MIT     │ +│ pip          │ 10.0.1  │ MIT     │ +│ samtools     │ 1.9     │ MIT     │ +│ python       │ 2.7.15  │ PSF     │ +│ bzip2        │ 1.0.6   │ bzip2   │ +└──────────────┴─────────┴─────────┘ diff --git a/docs/images/nf-core-lint.svg b/docs/images/nf-core-lint.svg index 89e8c43d82..268989f7eb 100644 --- a/docs/images/nf-core-lint.svg +++ b/docs/images/nf-core-lint.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - + - - $ nf-core lint - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Testing pipeline: . - - -╭─[?] 
1 Pipeline Test Ignored────────────────────────────────────────────────────────────────────╮ - -pipeline_todos: pipeline_todos - -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─[!] 1 Pipeline Test Warning────────────────────────────────────────────────────────────────────╮ - -readme: README contains the placeholder zenodo.XXXXXXX. This should be replaced with the zenodo  -doi (after the first release). - -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ - -╭──────────────────────╮ -LINT RESULTS SUMMARY -├──────────────────────┤ -[✔] 183 Tests Passed -[?]   1 Test Ignored -[!]   1 Test Warning -[✗]   0 Tests Failed -╰──────────────────────╯ + + $ nf-core lint + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Testing pipeline: . + + +╭─[?] 1 Pipeline Test Ignored────────────────────────────────────────────────────────────────────╮ + +pipeline_todos: pipeline_todos + +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─[!] 1 Pipeline Test Warning────────────────────────────────────────────────────────────────────╮ + +readme: README contains the placeholder zenodo.XXXXXXX. This should be replaced with the zenodo  +doi (after the first release). + +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ + +╭─[!] 
3 Module Test Warnings─────────────────────────────────────────────────────────────────────╮ +                                           ╷                          ╷                            +Module name                              File path               Test message              +╶──────────────────────────────────────────┼──────────────────────────┼──────────────────────────╴ +custom/dumpsoftwareversionsmodules/nf-core/custom/…New version available +fastqcmodules/nf-core/fastqc  New version available +multiqcmodules/nf-core/multiqc New version available +                                           ╵                          ╵                            +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭───────────────────────╮ +LINT RESULTS SUMMARY  +├───────────────────────┤ +[✔] 181 Tests Passed +[?]   1 Test Ignored +[!]   4 Test Warnings +[✗]   0 Tests Failed +╰───────────────────────╯ diff --git a/docs/images/nf-core-list-rna.svg b/docs/images/nf-core-list-rna.svg index 0aa93b0d91..414e75d9ba 100644 --- a/docs/images/nf-core-list-rna.svg +++ b/docs/images/nf-core-list-rna.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + + + + - + - + - - $ nf-core list rna rna-seq - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ -Pipeline Name       StarsLatest Release   ReleasedLast PulledHave latest release? 
-┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━┩ -│ smrnaseq             │    49 │          2.2.0 │   yesterday │           - │ -                    │ -│ rnafusion            │    95 │          2.3.4 │  2 days ago │           - │ -                    │ -│ rnaseq               │   604 │         3.11.2 │  3 days ago │           - │ -                    │ -│ dualrnaseq           │    12 │          1.0.0 │ 2 years ago │           - │ -                    │ -│ circrna              │    27 │            dev │           - │           - │ -                    │ -│ lncpipe              │    25 │            dev │           - │           - │ -                    │ -│ scflow               │    19 │            dev │           - │           - │ -                    │ -│ spatialtranscriptom… │    19 │            dev │           - │           - │ -                    │ -└──────────────────────┴───────┴────────────────┴─────────────┴─────────────┴──────────────────────┘ + + $ nf-core list rna rna-seq + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name       StarsLatest Release    ReleasedLast Pulledrelease?            
+┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ marsseq              │     3 │          1.0.1 │   4 days ago │           - │ -                   │ +│ scrnaseq             │    87 │          2.3.2 │  3 weeks ago │           - │ -                   │ +│ rnaseq               │   636 │         3.12.0 │  4 weeks ago │           - │ -                   │ +│ smrnaseq             │    50 │          2.2.1 │ 2 months ago │           - │ -                   │ +│ rnafusion            │   105 │          2.3.4 │ 2 months ago │           - │ -                   │ +│ differentialabundan… │    22 │          1.2.0 │ 2 months ago │           - │ -                   │ +│ dualrnaseq           │    13 │          1.0.0 │  2 years ago │           - │ -                   │ +│ circrna              │    27 │            dev │            - │           - │ -                   │ +│ lncpipe              │    25 │            dev │            - │           - │ -                   │ +│ scflow               │    19 │            dev │            - │           - │ -                   │ +│ spatialtranscriptom… │    24 │            dev │            - │           - │ -                   │ +└──────────────────────┴───────┴────────────────┴──────────────┴─────────────┴─────────────────────┘ diff --git a/docs/images/nf-core-list-stars.svg b/docs/images/nf-core-list-stars.svg index bd24375ed5..06349aa3a1 100644 --- a/docs/images/nf-core-list-stars.svg +++ b/docs/images/nf-core-list-stars.svg @@ -19,88 +19,88 @@ font-weight: 700; } - .terminal-3741580213-matrix { + .terminal-960691040-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3741580213-title { + .terminal-960691040-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3741580213-r1 { fill: #c5c8c6 } -.terminal-3741580213-r2 { fill: #98a84b } -.terminal-3741580213-r3 { fill: #9a9b99 } 
-.terminal-3741580213-r4 { fill: #608ab1 } -.terminal-3741580213-r5 { fill: #d0b344 } -.terminal-3741580213-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-3741580213-r7 { fill: #868887 } -.terminal-3741580213-r8 { fill: #868887;font-style: italic; } + .terminal-960691040-r1 { fill: #c5c8c6 } +.terminal-960691040-r2 { fill: #98a84b } +.terminal-960691040-r3 { fill: #9a9b99 } +.terminal-960691040-r4 { fill: #608ab1 } +.terminal-960691040-r5 { fill: #d0b344 } +.terminal-960691040-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-960691040-r7 { fill: #868887 } +.terminal-960691040-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -112,29 +112,29 @@ - + - - $ nf-core list -s stars - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ rnaseq              │   604 │         3.11.2 │    3 days ago │           - │ -                   │ -│ sarek               │   235 │          3.1.2 │  4 months ago │           - │ -                   │ -│ chipseq             │   144 │          2.0.0 │  7 months ago │           - │ -                   │ -│ atacseq             │   134 │            2.0 │  5 months ago │           - │ -                   │ -[..truncated..] + + $ nf-core list -s stars + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name       StarsLatest Release    ReleasedLast Pulledrelease?            +┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ rnaseq               │   636 │         3.12.0 │  4 weeks ago │           - │ -                   │ +│ sarek                │   252 │          3.2.3 │   1 week ago │           - │ -                   │ +│ chipseq              │   148 │          2.0.0 │ 9 months ago │           - │ -                   │ +│ atacseq              │   141 │            2.0 │ 7 months ago │           - │ -                   │ +[..truncated..] diff --git a/docs/images/nf-core-list.svg b/docs/images/nf-core-list.svg index 3c4a4cd4df..92324289f4 100644 --- a/docs/images/nf-core-list.svg +++ b/docs/images/nf-core-list.svg @@ -19,91 +19,91 @@ font-weight: 700; } - .terminal-3979640600-matrix { + .terminal-4201938007-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3979640600-title { + .terminal-4201938007-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3979640600-r1 { fill: #c5c8c6 } -.terminal-3979640600-r2 { fill: #98a84b } -.terminal-3979640600-r3 { fill: #9a9b99 } -.terminal-3979640600-r4 { fill: #608ab1 } -.terminal-3979640600-r5 { fill: #d0b344 } -.terminal-3979640600-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-3979640600-r7 { fill: #868887 } -.terminal-3979640600-r8 { fill: #868887;font-style: italic; } + .terminal-4201938007-r1 { fill: #c5c8c6 } +.terminal-4201938007-r2 { fill: #98a84b } 
+.terminal-4201938007-r3 { fill: #9a9b99 } +.terminal-4201938007-r4 { fill: #608ab1 } +.terminal-4201938007-r5 { fill: #d0b344 } +.terminal-4201938007-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-4201938007-r7 { fill: #868887 } +.terminal-4201938007-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -115,30 +115,30 @@ - + - - $ nf-core list - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ funcscan            │    29 │          1.1.0 │  14 hours ago │           - │ -                   │ -│ smrnaseq            │    49 │          2.2.0 │     yesterday │           - │ -                   │ -│ rnafusion           │    95 │          2.3.4 │    2 days ago │           - │ -                   │ -│ rnaseq              │   604 │         3.11.2 │    3 days ago │           - │ -                   │ -│ demultiplex         │    25 │          1.2.0 │    3 days ago │           - │ -                   │ -[..truncated..] + + $ nf-core list + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name       StarsLatest Release    ReleasedLast Pulledrelease?            +┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ funcscan             │    35 │          1.1.2 │  5 hours ago │           - │ -                   │ +│ ampliseq             │   118 │          2.6.1 │   2 days ago │           - │ -                   │ +│ circdna              │    16 │          1.0.4 │   3 days ago │           - │ -                   │ +│ marsseq              │     3 │          1.0.1 │   4 days ago │           - │ -                   │ +│ nanostring           │     4 │          1.1.1 │   6 days ago │           - │ -                   │ +[..truncated..] diff --git a/docs/images/nf-core-modules-bump-version.svg b/docs/images/nf-core-modules-bump-version.svg index 0a9094ad26..3bafb91264 100644 --- a/docs/images/nf-core-modules-bump-version.svg +++ b/docs/images/nf-core-modules-bump-version.svg @@ -1,4 +1,4 @@ - + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - + - - $ nf-core modules bump-versions fastqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - + + $ nf-core modules bump-versions fastqc + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + + +╭──────────────────────────────────────────────────────────────────────────────────────────────────╮ +[!] 1 Module version up to date. +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭──────────────────────────────────────────┬───────────────────────────────────────────────────────╮ +Module name                             Update Message                                        +├──────────────────────────────────────────┼───────────────────────────────────────────────────────┤ + fastqc                                    Module version up to date: fastqc                      +╰──────────────────────────────────────────┴───────────────────────────────────────────────────────╯ diff --git a/docs/images/nf-core-modules-create-test.svg b/docs/images/nf-core-modules-create-test.svg index e49f6dcffa..9af81b95f8 100644 --- a/docs/images/nf-core-modules-create-test.svg +++ b/docs/images/nf-core-modules-create-test.svg @@ -19,84 +19,84 @@ font-weight: 700; } - .terminal-1372103280-matrix { + .terminal-1526832300-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1372103280-title { + .terminal-1526832300-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1372103280-r1 { fill: #c5c8c6 } -.terminal-1372103280-r2 { fill: #98a84b } -.terminal-1372103280-r3 { fill: #9a9b99 } -.terminal-1372103280-r4 { fill: #608ab1 } -.terminal-1372103280-r5 { fill: #d0b344 } -.terminal-1372103280-r6 { fill: #ff2c7a } -.terminal-1372103280-r7 { fill: #98729f } + .terminal-1526832300-r1 { fill: #c5c8c6 } +.terminal-1526832300-r2 { fill: #98a84b } +.terminal-1526832300-r3 { 
fill: #9a9b99 } +.terminal-1526832300-r4 { fill: #608ab1 } +.terminal-1526832300-r5 { fill: #d0b344 } +.terminal-1526832300-r6 { fill: #ff2c7a } +.terminal-1526832300-r7 { fill: #98729f } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -108,28 +108,28 @@ - + - - $ nf-core modules create-test-yml fastqc --no-prompts --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Looking for test workflow entry points: 'tests/modules/nf-core/fastqc/main.nf' -──────────────────────────────────────────────────────────────────────────────────────────────────── -INFO     Building test meta for entry point 'test_fastqc_paired_end' -INFO     Running 'fastqc' test with command:                                                         -nextflow run ./tests/modules/nf-core/fastqc -entry test_fastqc_paired_end -c  -./tests/config/nextflow.config -c ./tests/modules/nf-core/fastqc/nextflow.config --outdir  -/tmp/tmpnqfm1ogi -work-dir /tmp/tmpcv36s2sh + + $ nf-core modules create-test-yml fastqc --no-prompts --force + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Looking for test workflow entry points: 'tests/modules/nf-core/fastqc/main.nf' +──────────────────────────────────────────────────────────────────────────────────────────────────── +INFO     Building test meta for entry point 'test_fastqc' +INFO     Running 'fastqc' test with command:                                                         +nextflow run ./tests/modules/nf-core/fastqc -entry test_fastqc -c  +./tests/config/nextflow.config -c ./tests/modules/nf-core/fastqc/nextflow.config --outdir  +/tmp/tmpe8nlzgcc -work-dir /tmp/tmputkzu3j5 diff --git a/docs/images/nf-core-modules-create.svg b/docs/images/nf-core-modules-create.svg index 3a97b353d1..70a03b29a7 100644 --- a/docs/images/nf-core-modules-create.svg +++ b/docs/images/nf-core-modules-create.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - + - - $ nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Repository type: modules -INFO    Press enter to use default values (shown in brackets)or type your own responses.  -ctrl+click underlined text to open links. -INFO     Using Bioconda package: 'bioconda::fastqc=0.12.1' + + $ nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Repository type: modules +INFO    Press enter to use default values (shown in brackets)or type your own responses.  +ctrl+click underlined text to open links. +INFO     Using Bioconda package: 'bioconda::fastqc=0.12.1' +INFO     Using Docker container: 'biocontainers/fastqc:0.12.1--hdfd78af_0' +INFO     Using Singularity container:                                                                +'https://depot.galaxyproject.org/singularity/fastqc:0.12.1--hdfd78af_0' +INFO     Created / edited following files:                                                           +           ./modules/nf-core/fastqc/main.nf +           ./modules/nf-core/fastqc/meta.yml +           ./tests/modules/nf-core/fastqc/main.nf +           ./tests/modules/nf-core/fastqc/test.yml +           ./tests/modules/nf-core/fastqc/nextflow.config +           ./tests/config/pytest_modules.yml diff --git a/docs/images/nf-core-modules-info.svg b/docs/images/nf-core-modules-info.svg index 5bd142d9aa..3a65ab2733 100644 --- a/docs/images/nf-core-modules-info.svg +++ b/docs/images/nf-core-modules-info.svg @@ -19,163 +19,163 @@ font-weight: 700; } - .terminal-957411833-matrix { + .terminal-1537339898-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-957411833-title { + .terminal-1537339898-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-957411833-r1 { fill: #c5c8c6 } -.terminal-957411833-r2 { fill: #98a84b } -.terminal-957411833-r3 { fill: #9a9b99 } -.terminal-957411833-r4 { fill: #608ab1 } -.terminal-957411833-r5 { fill: #d0b344 } -.terminal-957411833-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-957411833-r7 { fill: 
#98a84b;font-weight: bold } -.terminal-957411833-r8 { fill: #868887 } -.terminal-957411833-r9 { fill: #d08442 } -.terminal-957411833-r10 { fill: #868887;font-style: italic; } -.terminal-957411833-r11 { fill: #98729f } + .terminal-1537339898-r1 { fill: #c5c8c6 } +.terminal-1537339898-r2 { fill: #98a84b } +.terminal-1537339898-r3 { fill: #9a9b99 } +.terminal-1537339898-r4 { fill: #608ab1 } +.terminal-1537339898-r5 { fill: #d0b344 } +.terminal-1537339898-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-1537339898-r7 { fill: #98a84b;font-weight: bold } +.terminal-1537339898-r8 { fill: #868887 } +.terminal-1537339898-r9 { fill: #d08442 } +.terminal-1537339898-r10 { fill: #868887;font-style: italic; } +.terminal-1537339898-r11 { fill: #98729f } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -187,53 +187,53 @@ - + - - $ nf-core modules info abacas - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -╭─ Module: abacas  ────────────────────────────────────────────────────────────────────────────────╮ -│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ -│ 🔧 Tools: abacas                                                                                 │ -│ 📖 Description: contiguate draft genome assembly                                                 │ -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -                  ╷                                                                   ╷              -📥 Inputs        Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. 
[ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - scaffold  (file)│Fasta file containing scaffold                                     │*.{fasta,fa} -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - fasta  (file)   │FASTA reference file                                               │*.{fasta,fa} -                  ╵                                                                   ╵              -                  ╷                                                                   ╷              -📤 Outputs       Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. [ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - results  (files)│List containing abacas output files [ 'test.abacas.bin',           │ *.{abacas}* -                  │'test.abacas.fasta', 'test.abacas.gaps', 'test.abacas.gaps.tab',   │ -                  │'test.abacas.nucmer.delta', 'test.abacas.nucmer.filtered.delta',   │ -                  │'test.abacas.nucmer.tiling', 'test.abacas.tab',                    │ -                  │'test.abacas.unused.contigs.out', 'test.abacas.MULTIFASTA.fa' ]    │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - versions  (file)│File containing software versions                                  │versions.yml -                  ╵                                                                   ╵              - - 💻  Installation command: nf-core 
modules install abacas - + + $ nf-core modules info abacas + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +╭─ Module: abacas  ────────────────────────────────────────────────────────────────────────────────╮ +│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ +│ 🔧 Tools: abacas                                                                                 │ +│ 📖 Description: contiguate draft genome assembly                                                 │ +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +                  ╷                                                                   ╷              +📥 Inputs        Description                                                             Pattern +╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ + meta  (map)     │Groovy Map containing sample information e.g. 
[ id:'test',         │ +                  │single_end:false ]                                                 │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + scaffold  (file)│Fasta file containing scaffold                                     │*.{fasta,fa} +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + fasta  (file)   │FASTA reference file                                               │*.{fasta,fa} +                  ╵                                                                   ╵              +                  ╷                                                                   ╷              +📤 Outputs       Description                                                             Pattern +╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ + meta  (map)     │Groovy Map containing sample information e.g. [ id:'test',         │ +                  │single_end:false ]                                                 │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + results  (files)│List containing abacas output files [ 'test.abacas.bin',           │ *.{abacas}* +                  │'test.abacas.fasta', 'test.abacas.gaps', 'test.abacas.gaps.tab',   │ +                  │'test.abacas.nucmer.delta', 'test.abacas.nucmer.filtered.delta',   │ +                  │'test.abacas.nucmer.tiling', 'test.abacas.tab',                    │ +                  │'test.abacas.unused.contigs.out', 'test.abacas.MULTIFASTA.fa' ]    │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + versions  (file)│File containing software versions                                  │versions.yml +                  ╵                                                                   ╵              + + 💻  Installation command: nf-core 
modules install abacas + diff --git a/docs/images/nf-core-modules-install.svg b/docs/images/nf-core-modules-install.svg index f2869ed343..9385dba62f 100644 --- a/docs/images/nf-core-modules-install.svg +++ b/docs/images/nf-core-modules-install.svg @@ -19,76 +19,76 @@ font-weight: 700; } - .terminal-2146405380-matrix { + .terminal-2221378565-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2146405380-title { + .terminal-2221378565-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2146405380-r1 { fill: #c5c8c6 } -.terminal-2146405380-r2 { fill: #98a84b } -.terminal-2146405380-r3 { fill: #9a9b99 } -.terminal-2146405380-r4 { fill: #608ab1 } -.terminal-2146405380-r5 { fill: #d0b344 } + .terminal-2221378565-r1 { fill: #c5c8c6 } +.terminal-2221378565-r2 { fill: #98a84b } +.terminal-2221378565-r3 { fill: #9a9b99 } +.terminal-2221378565-r4 { fill: #608ab1 } +.terminal-2221378565-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -100,26 +100,26 @@ - + - - $ nf-core modules install abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Installing 'abacas' -INFO     Use the following statement to include this module:                                         - - include { ABACAS } from '../modules/nf-core/abacas/main'                                            - + + $ nf-core modules install abacas + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Installing 'abacas' +INFO     Use the following statement to include this module:                                         + + include { ABACAS } from '../modules/nf-core/abacas/main'                                            + diff --git a/docs/images/nf-core-modules-lint.svg b/docs/images/nf-core-modules-lint.svg index 8f3b419489..08d62c2f3c 100644 --- a/docs/images/nf-core-modules-lint.svg +++ b/docs/images/nf-core-modules-lint.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - $ nf-core modules lint multiqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Linting modules repo: '.' -INFO     Linting module: 'multiqc' - -╭───────────────────────╮ -LINT RESULTS SUMMARY -├───────────────────────┤ -[✔]  24 Tests Passed  -[!]   0 Test Warnings -[✗]   0 Tests Failed  -╰───────────────────────╯ + + $ nf-core modules lint multiqc + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Linting modules repo: '.' 
+INFO     Linting module: 'multiqc' diff --git a/docs/images/nf-core-modules-list-local.svg b/docs/images/nf-core-modules-list-local.svg index 3128f1caad..cff9db10e3 100644 --- a/docs/images/nf-core-modules-list-local.svg +++ b/docs/images/nf-core-modules-list-local.svg @@ -19,108 +19,108 @@ font-weight: 700; } - .terminal-4212330781-matrix { + .terminal-136384798-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4212330781-title { + .terminal-136384798-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4212330781-r1 { fill: #c5c8c6 } -.terminal-4212330781-r2 { fill: #98a84b } -.terminal-4212330781-r3 { fill: #9a9b99 } -.terminal-4212330781-r4 { fill: #608ab1 } -.terminal-4212330781-r5 { fill: #d0b344 } -.terminal-4212330781-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-4212330781-r7 { fill: #868887;font-style: italic; } + .terminal-136384798-r1 { fill: #c5c8c6 } +.terminal-136384798-r2 { fill: #98a84b } +.terminal-136384798-r3 { fill: #9a9b99 } +.terminal-136384798-r4 { fill: #608ab1 } +.terminal-136384798-r5 { fill: #d0b344 } +.terminal-136384798-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-136384798-r7 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -132,36 +132,36 @@ - + - - $ nf-core modules list local - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Modules installed in '.':                                                                   - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ -Module Name        Repository        Version SHA        Message           Date       -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ -│ custom/dumpsoftwar… │ https://github.co… │ 76cc4938c1f6ea5c7d… │ give execution     │ 2023-04-28 │ -│                     │                    │                     │ permissions to     │            │ -│                     │                    │                     │ dumpsoftwareversi… │            │ -│                     │                    │                     │ (#3347)            │            │ -│ fastqc              │ https://github.co… │ c8e35eb2055c099720… │ Bulk change conda  │ 2022-12-13 │ -│                     │                    │                     │ syntax for all     │            │ -│                     │                    │                     │ modules (#2654)    │            │ -│ multiqc             │ https://github.co… │ f2d63bd5b68925f98f… │ fix meta.ymls for  │ 2023-04-28 │ -│                     │                    │                     │ dumpsoftware and   │            │ -[..truncated..] + + $ nf-core modules list local + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Modules installed in '.':                                                                   + +┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ +Module Name        Repository        Version SHA        Message           Date       +┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ +│ custom/dumpsoftwar… │ https://github.co… │ 76cc4938c1f6ea5c7d… │ give execution     │ 2023-04-28 │ +│                     │                    │                     │ permissions to     │            │ +│                     │                    │                     │ dumpsoftwareversi… │            │ +│                     │                    │                     │ (#3347)            │            │ +│ fastqc              │ https://github.co… │ c8e35eb2055c099720… │ Bulk change conda  │ 2022-12-13 │ +│                     │                    │                     │ syntax for all     │            │ +│                     │                    │                     │ modules (#2654)    │            │ +│ multiqc             │ https://github.co… │ f2d63bd5b68925f98f… │ fix meta.ymls for  │ 2023-04-28 │ +│                     │                    │                     │ dumpsoftware and   │            │ +[..truncated..] 
diff --git a/docs/images/nf-core-modules-list-remote.svg b/docs/images/nf-core-modules-list-remote.svg index 4faf10450f..1896cb078b 100644 --- a/docs/images/nf-core-modules-list-remote.svg +++ b/docs/images/nf-core-modules-list-remote.svg @@ -19,109 +19,109 @@ font-weight: 700; } - .terminal-3737934755-matrix { + .terminal-3884145572-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3737934755-title { + .terminal-3884145572-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3737934755-r1 { fill: #c5c8c6 } -.terminal-3737934755-r2 { fill: #98a84b } -.terminal-3737934755-r3 { fill: #9a9b99 } -.terminal-3737934755-r4 { fill: #608ab1 } -.terminal-3737934755-r5 { fill: #d0b344 } -.terminal-3737934755-r6 { fill: #1984e9;text-decoration: underline; } -.terminal-3737934755-r7 { fill: #c5c8c6;font-weight: bold } -.terminal-3737934755-r8 { fill: #868887;font-style: italic; } + .terminal-3884145572-r1 { fill: #c5c8c6 } +.terminal-3884145572-r2 { fill: #98a84b } +.terminal-3884145572-r3 { fill: #9a9b99 } +.terminal-3884145572-r4 { fill: #608ab1 } +.terminal-3884145572-r5 { fill: #d0b344 } +.terminal-3884145572-r6 { fill: #1984e9;text-decoration: underline; } +.terminal-3884145572-r7 { fill: #c5c8c6;font-weight: bold } +.terminal-3884145572-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -133,36 +133,36 @@ - + - - $ nf-core modules list remote - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Modules available from https://github.com/nf-core/modules.git(master):                     - -┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ -Module Name                                           -┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ -│ abacas                                                │ -│ abricate/run                                          │ -│ abricate/summary                                      │ -│ adapterremoval                                        │ -│ adapterremovalfixprefix                               │ -│ admixture                                             │ -│ affy/justrma                                          │ -│ agat/convertspgff2gtf                                 │ -│ agat/convertspgxf2gxf                                 │ -[..truncated..] + + $ nf-core modules list remote + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Modules available from https://github.com/nf-core/modules.git(master):                     + +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +Module Name                                           +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ abacas                                                │ +│ abricate/run                                          │ +│ abricate/summary                                      │ +│ adapterremoval                                        │ +│ adapterremovalfixprefix                               │ +│ admixture                                             │ +│ affy/justrma                                          │ +│ agat/convertspgff2gtf                                 │ +│ agat/convertspgxf2gxf                                 │ +[..truncated..] 
diff --git a/docs/images/nf-core-modules-patch.svg b/docs/images/nf-core-modules-patch.svg index b0d8a66a95..b5cf52459b 100644 --- a/docs/images/nf-core-modules-patch.svg +++ b/docs/images/nf-core-modules-patch.svg @@ -19,65 +19,65 @@ font-weight: 700; } - .terminal-277091844-matrix { + .terminal-314906117-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-277091844-title { + .terminal-314906117-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-277091844-r1 { fill: #c5c8c6 } -.terminal-277091844-r2 { fill: #98a84b } -.terminal-277091844-r3 { fill: #9a9b99 } -.terminal-277091844-r4 { fill: #608ab1 } -.terminal-277091844-r5 { fill: #d0b344 } -.terminal-277091844-r6 { fill: #cc555a;font-weight: bold } + .terminal-314906117-r1 { fill: #c5c8c6 } +.terminal-314906117-r2 { fill: #98a84b } +.terminal-314906117-r3 { fill: #9a9b99 } +.terminal-314906117-r4 { fill: #608ab1 } +.terminal-314906117-r5 { fill: #d0b344 } +.terminal-314906117-r6 { fill: #cc555a;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -89,22 +89,22 @@ - + - - $ nf-core modules patch fastqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -ERROR    Module 'modules/nf-core/fastqc' is unchanged. No patch to compute                           + + $ nf-core modules patch fastqc + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +ERROR    Module 'modules/nf-core/fastqc' is unchanged. No patch to compute                           diff --git a/docs/images/nf-core-modules-remove.svg b/docs/images/nf-core-modules-remove.svg index fb1d3591ff..9ccbb37c09 100644 --- a/docs/images/nf-core-modules-remove.svg +++ b/docs/images/nf-core-modules-remove.svg @@ -19,64 +19,64 @@ font-weight: 700; } - .terminal-3673042259-matrix { + .terminal-3716230484-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3673042259-title { + .terminal-3716230484-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3673042259-r1 { fill: #c5c8c6 } -.terminal-3673042259-r2 { fill: #98a84b } -.terminal-3673042259-r3 { fill: #9a9b99 } -.terminal-3673042259-r4 { fill: #608ab1 } -.terminal-3673042259-r5 { fill: #d0b344 } + .terminal-3716230484-r1 { fill: #c5c8c6 } +.terminal-3716230484-r2 { fill: #98a84b } +.terminal-3716230484-r3 { fill: #9a9b99 } +.terminal-3716230484-r4 { fill: #608ab1 } +.terminal-3716230484-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -88,22 +88,22 @@ - + - - $ nf-core modules remove abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Removed files for 'abacas' and it's dependencies 'abacas'.                                  + + $ nf-core modules remove abacas + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Removed files for 'abacas' and it's dependencies 'abacas'.                                  diff --git a/docs/images/nf-core-modules-test.svg b/docs/images/nf-core-modules-test.svg index 83f6535612..3c6eeaca9e 100644 --- a/docs/images/nf-core-modules-test.svg +++ b/docs/images/nf-core-modules-test.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + - + - + - - $ nf-core modules test samtools/view --no-prompts - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -────────────────────────────────────────── samtools/view ─────────────────────────────────────────── -INFO     Running pytest for module 'samtools/view' + + $ nf-core modules test samtools/view --no-prompts + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +────────────────────────────────────────── samtools/view ─────────────────────────────────────────── +WARNING  You have uncommitted changes. Make sure to commit last changes before running the tests.    
+INFO     Running pytest for module 'samtools/view' diff --git a/docs/images/nf-core-modules-update.svg b/docs/images/nf-core-modules-update.svg index a41cbbeec4..b4766f98c4 100644 --- a/docs/images/nf-core-modules-update.svg +++ b/docs/images/nf-core-modules-update.svg @@ -19,76 +19,76 @@ font-weight: 700; } - .terminal-3947254567-matrix { + .terminal-2020623448-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3947254567-title { + .terminal-2020623448-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3947254567-r1 { fill: #c5c8c6 } -.terminal-3947254567-r2 { fill: #98a84b } -.terminal-3947254567-r3 { fill: #9a9b99 } -.terminal-3947254567-r4 { fill: #608ab1 } -.terminal-3947254567-r5 { fill: #d0b344 } + .terminal-2020623448-r1 { fill: #c5c8c6 } +.terminal-2020623448-r2 { fill: #98a84b } +.terminal-2020623448-r3 { fill: #9a9b99 } +.terminal-2020623448-r4 { fill: #608ab1 } +.terminal-2020623448-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -100,26 +100,26 @@ - + - - $ nf-core modules update --all --no-preview - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO    'modules/nf-core/abacas' is already up to date                                              -INFO    'modules/nf-core/custom/dumpsoftwareversions' is already up to date                         -INFO    'modules/nf-core/fastqc' is already up to date                                              -INFO    'modules/nf-core/multiqc' is already up to date                                             -INFO     Updates complete ✨                                                                         + + $ nf-core modules update --all --no-preview + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO    'modules/nf-core/abacas' is already up to date                                              +INFO     Updating 'nf-core/custom/dumpsoftwareversions' +INFO     Updating 'nf-core/fastqc' +INFO     Updating 'nf-core/multiqc' +INFO     Updates complete ✨                                                                         diff --git a/docs/images/nf-core-schema-build.svg b/docs/images/nf-core-schema-build.svg index 5fa9746d5a..21bb725c6d 100644 --- a/docs/images/nf-core-schema-build.svg +++ b/docs/images/nf-core-schema-build.svg @@ -19,72 +19,72 @@ font-weight: 700; } - .terminal-3395078802-matrix { + .terminal-3584346773-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3395078802-title { + .terminal-3584346773-title { font-size: 18px; font-weight: bold; 
font-family: arial; } - .terminal-3395078802-r1 { fill: #c5c8c6 } -.terminal-3395078802-r2 { fill: #98a84b } -.terminal-3395078802-r3 { fill: #9a9b99 } -.terminal-3395078802-r4 { fill: #608ab1 } -.terminal-3395078802-r5 { fill: #d0b344 } -.terminal-3395078802-r6 { fill: #98a84b;font-weight: bold } -.terminal-3395078802-r7 { fill: #868887;font-weight: bold } -.terminal-3395078802-r8 { fill: #868887 } -.terminal-3395078802-r9 { fill: #4e707b;font-weight: bold } -.terminal-3395078802-r10 { fill: #68a0b3;font-weight: bold } + .terminal-3584346773-r1 { fill: #c5c8c6 } +.terminal-3584346773-r2 { fill: #98a84b } +.terminal-3584346773-r3 { fill: #9a9b99 } +.terminal-3584346773-r4 { fill: #608ab1 } +.terminal-3584346773-r5 { fill: #d0b344 } +.terminal-3584346773-r6 { fill: #98a84b;font-weight: bold } +.terminal-3584346773-r7 { fill: #868887;font-weight: bold } +.terminal-3584346773-r8 { fill: #868887 } +.terminal-3584346773-r9 { fill: #4e707b;font-weight: bold } +.terminal-3584346773-r10 { fill: #68a0b3;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -96,23 +96,23 @@ - + - - $ nf-core schema build --no-prompts - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -INFO    [] Default parameters match schema validation -INFO    [] Pipeline schema looks valid(found 30 params) -INFO     Writing schema with 31 params: './nextflow_schema.json' + + $ nf-core schema build --no-prompts + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +INFO    [] Default parameters match schema validation +INFO    [] Pipeline schema looks valid(found 31 params) +INFO     Writing schema with 32 params: './nextflow_schema.json' diff --git a/docs/images/nf-core-schema-lint.svg b/docs/images/nf-core-schema-lint.svg index a5b8a94fd3..ec9dcbb236 100644 --- a/docs/images/nf-core-schema-lint.svg +++ b/docs/images/nf-core-schema-lint.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + - + - + - - $ nf-core schema lint - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -INFO    [] Default parameters match schema validation -INFO    [] Pipeline schema looks valid(found 31 params) + + $ nf-core schema lint + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +WARNING [!] Default parameter 'outdir' is empty or null. 
It is advisable to remove the default from +the schema +INFO    [] Default parameters match schema validation +INFO    [] Pipeline schema looks valid(found 32 params) diff --git a/docs/images/nf-core-schema-validate.svg b/docs/images/nf-core-schema-validate.svg index 0ec7e246aa..b7fe83a26e 100644 --- a/docs/images/nf-core-schema-validate.svg +++ b/docs/images/nf-core-schema-validate.svg @@ -19,71 +19,71 @@ font-weight: 700; } - .terminal-3313499933-matrix { + .terminal-3684760685-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3313499933-title { + .terminal-3684760685-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3313499933-r1 { fill: #c5c8c6 } -.terminal-3313499933-r2 { fill: #98a84b } -.terminal-3313499933-r3 { fill: #9a9b99 } -.terminal-3313499933-r4 { fill: #608ab1 } -.terminal-3313499933-r5 { fill: #d0b344 } -.terminal-3313499933-r6 { fill: #98a84b;font-weight: bold } -.terminal-3313499933-r7 { fill: #868887;font-weight: bold } -.terminal-3313499933-r8 { fill: #868887 } -.terminal-3313499933-r9 { fill: #4e707b;font-weight: bold } + .terminal-3684760685-r1 { fill: #c5c8c6 } +.terminal-3684760685-r2 { fill: #98a84b } +.terminal-3684760685-r3 { fill: #9a9b99 } +.terminal-3684760685-r4 { fill: #608ab1 } +.terminal-3684760685-r5 { fill: #d0b344 } +.terminal-3684760685-r6 { fill: #98a84b;font-weight: bold } +.terminal-3684760685-r7 { fill: #868887;font-weight: bold } +.terminal-3684760685-r8 { fill: #868887 } +.terminal-3684760685-r9 { fill: #4e707b;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -95,23 +95,23 @@ - + - - $ nf-core schema validate nf-core-rnaseq/workflow nf-params.json - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -INFO    [] Default parameters match schema validation -INFO    [] Pipeline schema looks valid(found 93 params) -INFO    [] Input parameters look valid + + $ nf-core schema validate nf-core-rnaseq/3_8 nf-params.json + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +INFO    [] Default parameters match schema validation +INFO    [] Pipeline schema looks valid(found 93 params) +INFO    [] Input parameters look valid diff --git a/docs/images/nf-core-subworkflows-create-test.svg b/docs/images/nf-core-subworkflows-create-test.svg index 074b32dd15..9ecec3ff4d 100644 --- a/docs/images/nf-core-subworkflows-create-test.svg +++ b/docs/images/nf-core-subworkflows-create-test.svg @@ -19,84 +19,84 @@ font-weight: 700; } - .terminal-1209424158-matrix { + .terminal-846289372-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1209424158-title { + .terminal-846289372-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1209424158-r1 { fill: #c5c8c6 } -.terminal-1209424158-r2 { fill: #98a84b } -.terminal-1209424158-r3 { fill: #9a9b99 } -.terminal-1209424158-r4 { fill: #608ab1 } -.terminal-1209424158-r5 { fill: #d0b344 } -.terminal-1209424158-r6 { fill: #ff2c7a } -.terminal-1209424158-r7 { fill: #98729f } + .terminal-846289372-r1 { fill: #c5c8c6 } +.terminal-846289372-r2 { fill: #98a84b } +.terminal-846289372-r3 { fill: #9a9b99 } +.terminal-846289372-r4 { 
fill: #608ab1 } +.terminal-846289372-r5 { fill: #d0b344 } +.terminal-846289372-r6 { fill: #ff2c7a } +.terminal-846289372-r7 { fill: #98729f } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -108,28 +108,28 @@ - + - - $ nf-core subworkflows create-test-yml bam_stats_samtools --no-prompts --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Looking for test workflow entry points:                                                     -'tests/subworkflows/nf-core/bam_stats_samtools/main.nf' -──────────────────────────────────────────────────────────────────────────────────────────────────── -INFO     Building test meta for entry point 'test_bam_stats_samtools' -INFO     Running 'bam_stats_samtools' test with command:                                             -nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools --c ./tests/config/nextflow.config --outdir /tmp/tmp4iszml15 -work-dir /tmp/tmp1r4spwkd + + $ nf-core subworkflows create-test-yml bam_stats_samtools --no-prompts --force + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Looking for test workflow entry points:                                                     +'tests/subworkflows/nf-core/bam_stats_samtools/main.nf' +──────────────────────────────────────────────────────────────────────────────────────────────────── +INFO     Building test meta for entry point 'test_bam_stats_samtools' +INFO     Running 'bam_stats_samtools' test with command:                                             +nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools +-c ./tests/config/nextflow.config --outdir /tmp/tmporwv9usk -work-dir /tmp/tmpgshpshl3 diff --git a/docs/images/nf-core-subworkflows-create.svg b/docs/images/nf-core-subworkflows-create.svg index 215fce11c0..df2311a2fd 100644 --- a/docs/images/nf-core-subworkflows-create.svg +++ b/docs/images/nf-core-subworkflows-create.svg @@ -19,94 +19,94 @@ font-weight: 700; } - .terminal-3514492232-matrix { + .terminal-3762021705-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3514492232-title { + .terminal-3762021705-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3514492232-r1 { fill: #c5c8c6 } -.terminal-3514492232-r2 { fill: #98a84b } -.terminal-3514492232-r3 { fill: #9a9b99 } -.terminal-3514492232-r4 { fill: #608ab1 } -.terminal-3514492232-r5 { fill: #d0b344 } -.terminal-3514492232-r6 { fill: #68a0b3;font-weight: bold } -.terminal-3514492232-r7 { fill: #98729f } -.terminal-3514492232-r8 { fill: #ff2c7a } + .terminal-3762021705-r1 { fill: #c5c8c6 } +.terminal-3762021705-r2 { fill: #98a84b } +.terminal-3762021705-r3 { fill: #9a9b99 } +.terminal-3762021705-r4 { fill: #608ab1 } 
+.terminal-3762021705-r5 { fill: #d0b344 } +.terminal-3762021705-r6 { fill: #68a0b3;font-weight: bold } +.terminal-3762021705-r7 { fill: #98729f } +.terminal-3762021705-r8 { fill: #ff2c7a } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -118,31 +118,31 @@ - + - - $ nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Repository type: modules -INFO    Press enter to use default values (shown in brackets)or type your own responses.  -ctrl+click underlined text to open links. -INFO     Created / edited following files:                                                           -           ./subworkflows/nf-core/bam_stats_samtools/main.nf -           ./subworkflows/nf-core/bam_stats_samtools/meta.yml -           ./tests/subworkflows/nf-core/bam_stats_samtools/main.nf -           ./tests/subworkflows/nf-core/bam_stats_samtools/test.yml -           ./tests/subworkflows/nf-core/bam_stats_samtools/nextflow.config -           ./tests/config/pytest_modules.yml + + $ nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Repository type: modules +INFO    Press enter to use default values (shown in brackets)or type your own responses.  +ctrl+click underlined text to open links. 
+INFO     Created / edited following files:                                                           +           ./subworkflows/nf-core/bam_stats_samtools/main.nf +           ./subworkflows/nf-core/bam_stats_samtools/meta.yml +           ./tests/subworkflows/nf-core/bam_stats_samtools/main.nf +           ./tests/subworkflows/nf-core/bam_stats_samtools/test.yml +           ./tests/subworkflows/nf-core/bam_stats_samtools/nextflow.config +           ./tests/config/pytest_modules.yml diff --git a/docs/images/nf-core-subworkflows-info.svg b/docs/images/nf-core-subworkflows-info.svg index bace90622f..ce903ed11b 100644 --- a/docs/images/nf-core-subworkflows-info.svg +++ b/docs/images/nf-core-subworkflows-info.svg @@ -19,304 +19,304 @@ font-weight: 700; } - .terminal-4158441153-matrix { + .terminal-1571342018-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4158441153-title { + .terminal-1571342018-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4158441153-r1 { fill: #c5c8c6 } -.terminal-4158441153-r2 { fill: #98a84b } -.terminal-4158441153-r3 { fill: #9a9b99 } -.terminal-4158441153-r4 { fill: #608ab1 } -.terminal-4158441153-r5 { fill: #d0b344 } -.terminal-4158441153-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-4158441153-r7 { fill: #98a84b;font-weight: bold } -.terminal-4158441153-r8 { fill: #868887 } -.terminal-4158441153-r9 { fill: #d08442 } -.terminal-4158441153-r10 { fill: #868887;font-style: italic; } -.terminal-4158441153-r11 { fill: #98729f } + .terminal-1571342018-r1 { fill: #c5c8c6 } +.terminal-1571342018-r2 { fill: #98a84b } +.terminal-1571342018-r3 { fill: #9a9b99 } +.terminal-1571342018-r4 { fill: #608ab1 } +.terminal-1571342018-r5 { fill: #d0b344 } +.terminal-1571342018-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-1571342018-r7 { fill: #98a84b;font-weight: bold } +.terminal-1571342018-r8 { fill: #868887 } +.terminal-1571342018-r9 { 
fill: #d08442 } +.terminal-1571342018-r10 { fill: #868887;font-style: italic; } +.terminal-1571342018-r11 { fill: #98729f } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -328,100 +328,100 @@ - + - - $ nf-core subworkflows info bam_rseqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -╭─ Subworkflow: bam_rseqc  ────────────────────────────────────────────────────────────────────────╮ -│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ -│ 📖 Description: Subworkflow to run multiple commands in the RSeqC package                        │ -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -                       ╷                                                                   ╷         -📥 Inputs             Description                                                        Pattern -╺━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━╸ - meta  (map)          │Groovy Map containing sample information e.g. 
[ id:'test',         │ -                       │single_end:false ]                                                 │ -╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ - bam  (file)          │BAM file to calculate statistics                                   │*.{bam} -╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ - bai  (file)          │Index for input BAM file                                           │*.{bai} -╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ - bed  (file)          │BED file for the reference gene model                              │*.{bed} -╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ - rseqc_modules  (list)│List of rseqc modules to run e.g. [ 'bam_stat', 'infer_experiment' │ -                       │]                                                                  │ -                       ╵                                                                   ╵         -                                     ╷                                   ╷                           -📤 Outputs                          Description                                          Pattern -╺━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━╸ - bamstat_txt  (file)                │bam statistics report              │           *.bam_stat.txt -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - innerdistance_distance  (file)     │the inner distances                │     *.inner_distance.txt -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - innerdistance_freq  (file)         │frequencies of different insert    │*.inner_distance_freq.txt -                                     │sizes                              │ 
-╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - innerdistance_mean  (file)         │mean/median values of inner        │*.inner_distance_mean.txt -                                     │distances                          │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - innerdistance_pdf  (file)          │distribution plot of inner         │*.inner_distance_plot.pdf -                                     │distances                          │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - innerdistance_rscript  (file)      │script to reproduce the plot       │  *.inner_distance_plot.R -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - inferexperiment_txt  (file)        │infer_experiment results report    │   *.infer_experiment.txt -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_bed  (file)     │bed file of annotated junctions    │           *.junction.bed -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_interact_bed   │Interact bed file                  │           *.Interact.bed -(file)                              │                                   │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_xls  (file)     │xls file with junction information │                    *.xls -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_pdf  (file)     │junction plot                      │           *.junction.pdf -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_events_pdf     │events plot        
                │             *.events.pdf -(file)                              │                                   │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_rscript  (file) │Rscript to reproduce the plots     │                      *.r -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_log  (file)     │Log file generated by tool         │                    *.log -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionsaturation_pdf  (file)     │Junction saturation report         │                    *.pdf -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionsaturation_rscript  (file) │Junction saturation R-script       │                      *.r -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - readdistribution_txt  (file)       │the read distribution report       │  *.read_distribution.txt -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - readduplication_seq_xls  (file)    │Read duplication rate determined   │         *seq.DupRate.xls -                                     │from mapping position of read      │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - readduplication_pos_xls  (file)    │Read duplication rate determined   │         *pos.DupRate.xls -                                     │from sequence of read              │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - readduplication_pdf  (file)        │plot of duplication rate           │                    *.pdf -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - 
readduplication_rscript  (file)    │script to reproduce the plot       │                      *.R -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - tin_txt  (file)                    │TXT file containing tin.py results │                    *.txt -                                     │summary                            │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - versions  (file)                   │File containing software versions  │             versions.yml -                                     ╵                                   ╵                           - - 💻  Installation command: nf-core subworkflows install bam_rseqc - + + $ nf-core subworkflows info bam_rseqc + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +╭─ Subworkflow: bam_rseqc  ────────────────────────────────────────────────────────────────────────╮ +│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ +│ 📖 Description: Subworkflow to run multiple commands in the RSeqC package                        │ +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +                       ╷                                                                   ╷         +📥 Inputs             Description                                                        Pattern +╺━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━╸ + meta  (map)          │Groovy Map containing sample information e.g. 
[ id:'test',         │ +                       │single_end:false ]                                                 │ +╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ + bam  (file)          │BAM file to calculate statistics                                   │*.{bam} +╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ + bai  (file)          │Index for input BAM file                                           │*.{bai} +╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ + bed  (file)          │BED file for the reference gene model                              │*.{bed} +╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ + rseqc_modules  (list)│List of rseqc modules to run e.g. [ 'bam_stat', 'infer_experiment' │ +                       │]                                                                  │ +                       ╵                                                                   ╵         +                                     ╷                                   ╷                           +📤 Outputs                          Description                                          Pattern +╺━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━╸ + bamstat_txt  (file)                │bam statistics report              │           *.bam_stat.txt +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + innerdistance_distance  (file)     │the inner distances                │     *.inner_distance.txt +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + innerdistance_freq  (file)         │frequencies of different insert    │*.inner_distance_freq.txt +                                     │sizes                              │ 
+╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + innerdistance_mean  (file)         │mean/median values of inner        │*.inner_distance_mean.txt +                                     │distances                          │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + innerdistance_pdf  (file)          │distribution plot of inner         │*.inner_distance_plot.pdf +                                     │distances                          │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + innerdistance_rscript  (file)      │script to reproduce the plot       │  *.inner_distance_plot.R +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + inferexperiment_txt  (file)        │infer_experiment results report    │   *.infer_experiment.txt +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_bed  (file)     │bed file of annotated junctions    │           *.junction.bed +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_interact_bed   │Interact bed file                  │           *.Interact.bed +(file)                              │                                   │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_xls  (file)     │xls file with junction information │                    *.xls +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_pdf  (file)     │junction plot                      │           *.junction.pdf +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_events_pdf     │events plot        
                │             *.events.pdf +(file)                              │                                   │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_rscript  (file) │Rscript to reproduce the plots     │                      *.r +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_log  (file)     │Log file generated by tool         │                    *.log +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionsaturation_pdf  (file)     │Junction saturation report         │                    *.pdf +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionsaturation_rscript  (file) │Junction saturation R-script       │                      *.r +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + readdistribution_txt  (file)       │the read distribution report       │  *.read_distribution.txt +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + readduplication_seq_xls  (file)    │Read duplication rate determined   │         *seq.DupRate.xls +                                     │from mapping position of read      │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + readduplication_pos_xls  (file)    │Read duplication rate determined   │         *pos.DupRate.xls +                                     │from sequence of read              │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + readduplication_pdf  (file)        │plot of duplication rate           │                    *.pdf +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + 
readduplication_rscript  (file)    │script to reproduce the plot       │                      *.R +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + tin_txt  (file)                    │TXT file containing tin.py results │                    *.txt +                                     │summary                            │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + versions  (file)                   │File containing software versions  │             versions.yml +                                     ╵                                   ╵                           + + 💻  Installation command: nf-core subworkflows install bam_rseqc + diff --git a/docs/images/nf-core-subworkflows-install.svg b/docs/images/nf-core-subworkflows-install.svg index 7363919195..7dfa30a301 100644 --- a/docs/images/nf-core-subworkflows-install.svg +++ b/docs/images/nf-core-subworkflows-install.svg @@ -19,64 +19,64 @@ font-weight: 700; } - .terminal-1929907304-matrix { + .terminal-1967000681-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1929907304-title { + .terminal-1967000681-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1929907304-r1 { fill: #c5c8c6 } -.terminal-1929907304-r2 { fill: #98a84b } -.terminal-1929907304-r3 { fill: #9a9b99 } -.terminal-1929907304-r4 { fill: #608ab1 } -.terminal-1929907304-r5 { fill: #d0b344 } + .terminal-1967000681-r1 { fill: #c5c8c6 } +.terminal-1967000681-r2 { fill: #98a84b } +.terminal-1967000681-r3 { fill: #9a9b99 } +.terminal-1967000681-r4 { fill: #608ab1 } +.terminal-1967000681-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -88,22 +88,22 @@ - + - - $ nf-core subworkflows install bam_rseqc - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Installing 'bam_rseqc' + + $ nf-core subworkflows install bam_rseqc + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Installing 'bam_rseqc' diff --git a/docs/images/nf-core-subworkflows-list-local.svg b/docs/images/nf-core-subworkflows-list-local.svg index dc2836367d..cd62723604 100644 --- a/docs/images/nf-core-subworkflows-list-local.svg +++ b/docs/images/nf-core-subworkflows-list-local.svg @@ -19,67 +19,67 @@ font-weight: 700; } - .terminal-1394611191-matrix { + .terminal-1433277432-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1394611191-title { + .terminal-1433277432-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1394611191-r1 { fill: #c5c8c6 } -.terminal-1394611191-r2 { fill: #98a84b } -.terminal-1394611191-r3 { fill: #9a9b99 } -.terminal-1394611191-r4 { fill: #608ab1 } -.terminal-1394611191-r5 { fill: #d0b344 } + .terminal-1433277432-r1 { fill: #c5c8c6 } +.terminal-1433277432-r2 { fill: #98a84b } +.terminal-1433277432-r3 { fill: #9a9b99 } +.terminal-1433277432-r4 { fill: #608ab1 } +.terminal-1433277432-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -91,23 +91,23 @@ - + - - $ nf-core subworkflows list local - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     No nf-core subworkflows found in '.' - + + $ nf-core subworkflows list local + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     No nf-core subworkflows found in '.' + diff --git a/docs/images/nf-core-subworkflows-list-remote.svg b/docs/images/nf-core-subworkflows-list-remote.svg index 025ee22a18..a14e7b006d 100644 --- a/docs/images/nf-core-subworkflows-list-remote.svg +++ b/docs/images/nf-core-subworkflows-list-remote.svg @@ -19,109 +19,109 @@ font-weight: 700; } - .terminal-1279347052-matrix { + .terminal-1417169261-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1279347052-title { + .terminal-1417169261-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1279347052-r1 { fill: #c5c8c6 } -.terminal-1279347052-r2 { fill: #98a84b } -.terminal-1279347052-r3 { fill: #9a9b99 } -.terminal-1279347052-r4 { fill: #608ab1 } -.terminal-1279347052-r5 { fill: #d0b344 } -.terminal-1279347052-r6 { fill: #1984e9;text-decoration: underline; } -.terminal-1279347052-r7 { fill: #c5c8c6;font-weight: bold } -.terminal-1279347052-r8 { fill: #868887;font-style: italic; } + .terminal-1417169261-r1 { fill: #c5c8c6 } +.terminal-1417169261-r2 { fill: #98a84b } +.terminal-1417169261-r3 { fill: #9a9b99 } +.terminal-1417169261-r4 { fill: #608ab1 } +.terminal-1417169261-r5 { fill: #d0b344 } +.terminal-1417169261-r6 { fill: 
#1984e9;text-decoration: underline; } +.terminal-1417169261-r7 { fill: #c5c8c6;font-weight: bold } +.terminal-1417169261-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -133,36 +133,36 @@ - + - - $ nf-core subworkflows list remote - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Subworkflows available from https://github.com/nf-core/modules.git(master):                - -┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ -Subworkflow Name                              -┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ -│ bam_create_som_pon_gatk                       │ -│ bam_dedup_stats_samtools_umitools             │ -│ bam_docounts_contamination_angsd              │ -│ bam_markduplicates_picard                     │ -│ bam_ngscheckmate                              │ -│ bam_qc_picard                                 │ -│ bam_rseqc                                     │ -│ bam_sort_stats_samtools                       │ -│ bam_split_by_region                           │ -[..truncated..] + + $ nf-core subworkflows list remote + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Subworkflows available from https://github.com/nf-core/modules.git(master):                + +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +Subworkflow Name                              +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ bam_create_som_pon_gatk                       │ +│ bam_dedup_stats_samtools_umitools             │ +│ bam_docounts_contamination_angsd              │ +│ bam_markduplicates_picard                     │ +│ bam_ngscheckmate                              │ +│ bam_qc_picard                                 │ +│ bam_rseqc                                     │ +│ bam_sort_stats_samtools                       │ +│ bam_split_by_region                           │ +[..truncated..] diff --git a/docs/images/nf-core-subworkflows-remove.svg b/docs/images/nf-core-subworkflows-remove.svg index 637ad9101c..055c1c378a 100644 --- a/docs/images/nf-core-subworkflows-remove.svg +++ b/docs/images/nf-core-subworkflows-remove.svg @@ -19,76 +19,76 @@ font-weight: 700; } - .terminal-2213080961-matrix { + .terminal-2348150658-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2213080961-title { + .terminal-2348150658-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2213080961-r1 { fill: #c5c8c6 } -.terminal-2213080961-r2 { fill: #98a84b } -.terminal-2213080961-r3 { fill: #9a9b99 } -.terminal-2213080961-r4 { fill: #608ab1 } -.terminal-2213080961-r5 { fill: #d0b344 } + .terminal-2348150658-r1 { fill: #c5c8c6 } +.terminal-2348150658-r2 { fill: #98a84b } +.terminal-2348150658-r3 { fill: #9a9b99 } +.terminal-2348150658-r4 { fill: #608ab1 } +.terminal-2348150658-r5 { fill: #d0b344 
} - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -100,26 +100,26 @@ - + - - $ nf-core subworkflows remove bam_rseqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -INFO     Removed files for 'rseqc/bamstat' and it's dependencies 'rseqc/bamstat'.                    -INFO     Removed files for 'rseqc/inferexperiment' and it's dependencies 'rseqc/inferexperiment'.    -INFO     Removed files for 'rseqc/innerdistance' and it's dependencies 'rseqc/innerdistance'.        -INFO     Removed files for 'bam_rseqc' and it's dependencies 'bam_rseqc, rseqc_bamstat,  -rseqc_inferexperiment, rseqc_innerdistance'.                                                + + $ nf-core subworkflows remove bam_rseqc + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +INFO     Removed files for 'rseqc/bamstat' and it's dependencies 'rseqc/bamstat'.                    +INFO     Removed files for 'rseqc/inferexperiment' and it's dependencies 'rseqc/inferexperiment'.    +INFO     Removed files for 'rseqc/innerdistance' and it's dependencies 'rseqc/innerdistance'.        +INFO     Removed files for 'bam_rseqc' and it's dependencies 'bam_rseqc, rseqc_bamstat,  +rseqc_inferexperiment, rseqc_innerdistance'.                                                
diff --git a/docs/images/nf-core-subworkflows-test.svg b/docs/images/nf-core-subworkflows-test.svg index f9ba18cc79..459502886a 100644 --- a/docs/images/nf-core-subworkflows-test.svg +++ b/docs/images/nf-core-subworkflows-test.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + - + - + - - $ nf-core subworkflows test bam_rseqc --no-prompts - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -──────────────────────────────────────────── bam_rseqc ───────────────────────────────────────────── -INFO     Running pytest for subworkflow 'bam_rseqc' + + $ nf-core subworkflows test bam_rseqc --no-prompts + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +──────────────────────────────────────────── bam_rseqc ───────────────────────────────────────────── +WARNING  You have uncommitted changes. Make sure to commit last changes before running the tests.    
+INFO     Running pytest for subworkflow 'bam_rseqc' diff --git a/docs/images/nf-core-subworkflows-update.svg b/docs/images/nf-core-subworkflows-update.svg index a9844d80a9..e58178193d 100644 --- a/docs/images/nf-core-subworkflows-update.svg +++ b/docs/images/nf-core-subworkflows-update.svg @@ -19,65 +19,65 @@ font-weight: 700; } - .terminal-3054113778-matrix { + .terminal-3091928051-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3054113778-title { + .terminal-3091928051-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3054113778-r1 { fill: #c5c8c6 } -.terminal-3054113778-r2 { fill: #98a84b } -.terminal-3054113778-r3 { fill: #9a9b99 } -.terminal-3054113778-r4 { fill: #608ab1 } -.terminal-3054113778-r5 { fill: #d0b344 } -.terminal-3054113778-r6 { fill: #cc555a;font-weight: bold } + .terminal-3091928051-r1 { fill: #c5c8c6 } +.terminal-3091928051-r2 { fill: #98a84b } +.terminal-3091928051-r3 { fill: #9a9b99 } +.terminal-3091928051-r4 { fill: #608ab1 } +.terminal-3091928051-r5 { fill: #d0b344 } +.terminal-3091928051-r6 { fill: #cc555a;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -89,22 +89,22 @@ - + - - $ nf-core subworkflows update --all --no-preview - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - - -ERROR   'rseqc/junctionannotation' + + $ nf-core subworkflows update --all --no-preview + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + + +ERROR   'rseqc/junctionannotation' diff --git a/docs/images/nf-core-sync.svg b/docs/images/nf-core-sync.svg index 7305dda430..d8c77548b5 100644 --- a/docs/images/nf-core-sync.svg +++ b/docs/images/nf-core-sync.svg @@ -19,72 +19,72 @@ font-weight: 700; } - .terminal-1818757582-matrix { + .terminal-1911884239-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1818757582-title { + .terminal-1911884239-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1818757582-r1 { fill: #c5c8c6 } -.terminal-1818757582-r2 { fill: #98a84b } -.terminal-1818757582-r3 { fill: #9a9b99 } -.terminal-1818757582-r4 { fill: #608ab1 } -.terminal-1818757582-r5 { fill: #d0b344 } -.terminal-1818757582-r6 { fill: #98729f } -.terminal-1818757582-r7 { fill: #ff2c7a } + .terminal-1911884239-r1 { fill: #c5c8c6 } +.terminal-1911884239-r2 { fill: #98a84b } +.terminal-1911884239-r3 { fill: #9a9b99 } +.terminal-1911884239-r4 { fill: #608ab1 } +.terminal-1911884239-r5 { fill: #d0b344 } +.terminal-1911884239-r6 { fill: #98729f } +.terminal-1911884239-r7 { fill: #ff2c7a } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -96,24 +96,24 @@ - + - - $ nf-core sync - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.8 - https://nf-co.re - - -INFO     Pipeline directory: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing -INFO     Original pipeline repository branch is 'master' -INFO     Deleting all files in 'TEMPLATE' branch                                                     -INFO     Making a new template pipeline using pipeline variables                                     + + $ nf-core sync + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.9 - https://nf-co.re + + +INFO     Pipeline directory: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing +INFO     Original pipeline repository branch is 'master' +INFO     Deleting all files in 'TEMPLATE' branch                                                     +INFO     Making a new template pipeline using pipeline variables                                     diff --git a/nf_core/__init__.py b/nf_core/__init__.py index e333335280..d96be73f3d 100644 --- a/nf_core/__init__.py +++ b/nf_core/__init__.py @@ -3,6 +3,6 @@ Shouldn't do much, as everything is under subcommands. 
""" -import pkg_resources +import importlib.metadata as importlib_metadata -__version__ = pkg_resources.get_distribution("nf_core").version +__version__ = importlib_metadata.version(__name__) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index e03fcbc67c..d57d27f1e6 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -10,26 +10,17 @@ import rich.traceback import rich_click as click -import nf_core -import nf_core.bump_version -import nf_core.create -import nf_core.download -import nf_core.launch -import nf_core.licences -import nf_core.lint -import nf_core.list -import nf_core.modules -import nf_core.schema -import nf_core.subworkflows -import nf_core.sync -import nf_core.utils +from nf_core import __version__ +from nf_core.download import DownloadError +from nf_core.modules.modules_repo import NF_CORE_MODULES_REMOTE +from nf_core.utils import check_if_outdated, rich_force_colors, setup_nfcore_dir # Set up logging as the root logger # Submodules should all traverse back to this log = logging.getLogger() # Set up .nfcore directory for storing files between sessions -nf_core.utils.setup_nfcore_dir() +setup_nfcore_dir() # Set up nicer formatting of click cli help messages click.rich_click.MAX_WIDTH = 100 @@ -71,13 +62,27 @@ } # Set up rich stderr console -stderr = rich.console.Console(stderr=True, force_terminal=nf_core.utils.rich_force_colors()) -stdout = rich.console.Console(force_terminal=nf_core.utils.rich_force_colors()) +stderr = rich.console.Console(stderr=True, force_terminal=rich_force_colors()) +stdout = rich.console.Console(force_terminal=rich_force_colors()) # Set up the rich traceback rich.traceback.install(console=stderr, width=200, word_wrap=True, extra_lines=1) +# Define exceptions for which no traceback should be printed, +# because they are actually preliminary, but intended program terminations. 
+# (Custom exceptions are cleaner than `sys.exit(1)`, which we used before) +def selective_traceback_hook(exctype, value, traceback): + if exctype in {DownloadError}: # extend set as needed + log.error(value) + else: + # print the colored traceback for all other exceptions with rich as usual + stderr.print(rich.traceback.Traceback.from_exception(exctype, value, traceback)) + + +sys.excepthook = selective_traceback_hook + + def run_nf_core(): # print nf-core header if environment variable is not set if os.environ.get("_NF_CORE_COMPLETE") is None: @@ -88,11 +93,11 @@ def run_nf_core(): stderr.print(r"[blue] | \| | \__, \__/ | \ |___ [green]\`-._,-`-,", highlight=False) stderr.print("[green] `._,._,'\n", highlight=False) stderr.print( - f"[grey39] nf-core/tools version {nf_core.__version__} - [link=https://nf-co.re]https://nf-co.re[/]", + f"[grey39] nf-core/tools version {__version__} - [link=https://nf-co.re]https://nf-co.re[/]", highlight=False, ) try: - is_outdated, _, remote_vers = nf_core.utils.check_if_outdated() + is_outdated, _, remote_vers = check_if_outdated() if is_outdated: stderr.print( f"[bold bright_yellow] There is a new version of nf-core/tools available! 
({remote_vers})", @@ -106,7 +111,7 @@ def run_nf_core(): @click.group(context_settings=dict(help_option_names=["-h", "--help"])) -@click.version_option(nf_core.__version__) +@click.version_option(__version__) @click.option("-v", "--verbose", is_flag=True, default=False, help="Print verbose output to the console.") @click.option("--hide-progress", is_flag=True, default=False, help="Don't show progress bars.") @click.option("-l", "--log-file", help="Save a verbose log to a file.", metavar="") @@ -124,7 +129,7 @@ def nf_core_cli(ctx, verbose, hide_progress, log_file): log.addHandler( rich.logging.RichHandler( level=logging.DEBUG if verbose else logging.INFO, - console=rich.console.Console(stderr=True, force_terminal=nf_core.utils.rich_force_colors()), + console=rich.console.Console(stderr=True, force_terminal=rich_force_colors()), show_time=False, show_path=verbose, # True if verbose, false otherwise markup=True, @@ -163,7 +168,9 @@ def list(keywords, sort, json, show_archived): Checks the web for a list of nf-core pipelines with their latest releases. Shows which nf-core pipelines you have pulled locally and whether they are up to date. """ - stdout.print(nf_core.list.list_workflows(keywords, sort, json, show_archived)) + from nf_core.list import list_workflows + + stdout.print(list_workflows(keywords, sort, json, show_archived)) # nf-core launch @@ -207,9 +214,9 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all Run using a remote pipeline name (such as GitHub `user/repo` or a URL), a local pipeline directory or an ID from the nf-core web launch tool. 
""" - launcher = nf_core.launch.Launch( - pipeline, revision, command_only, params_in, params_out, save_all, show_hidden, url, id - ) + from nf_core.launch import Launch + + launcher = Launch(pipeline, revision, command_only, params_in, params_out, save_all, show_hidden, url, id) if not launcher.launch_pipeline(): sys.exit(1) @@ -217,29 +224,87 @@ def launch(pipeline, id, revision, command_only, params_in, params_out, save_all # nf-core download @nf_core_cli.command() @click.argument("pipeline", required=False, metavar="") -@click.option("-r", "--revision", type=str, help="Pipeline release") +@click.option( + "-r", + "--revision", + multiple=True, + help="Pipeline release to download. Multiple invocations are possible, e.g. `-r 1.1 -r 1.2`", +) @click.option("-o", "--outdir", type=str, help="Output directory") @click.option( "-x", "--compress", type=click.Choice(["tar.gz", "tar.bz2", "zip", "none"]), help="Archive compression type" ) @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite existing files") +@click.option("-t", "--tower", is_flag=True, default=False, help="Download for seqeralabs® Nextflow Tower") +@click.option( + "-d", + "--download-configuration", + is_flag=True, + default=False, + help="Include configuration profiles in download. Not available with `--tower`", +) +# -c changed to -s for consistency with other --container arguments, where it is always the first letter of the last word. +# Also -c might be used instead of -d for config in a later release, but reusing params for different options in two subsequent releases might be too error-prone. 
+@click.option( + "-s", + "--container-system", + type=click.Choice(["none", "singularity"]), + help="Download container images of required software.", +) +@click.option( + "-l", + "--container-library", + multiple=True, + help="Container registry/library or mirror to pull images from.", +) @click.option( - "-c", "--container", type=click.Choice(["none", "singularity"]), help="Download software container images" + "-u", + "--container-cache-utilisation", + type=click.Choice(["amend", "copy", "remote"]), + help="Utilise a `singularity.cacheDir` in the download process, if applicable.", ) @click.option( - "--singularity-cache-only/--singularity-cache-copy", - help="Don't / do copy images to the output directory and set 'singularity.cacheDir' in workflow", + "-i", + "--container-cache-index", + type=str, + help="List of images already available in a remote `singularity.cacheDir`.", ) @click.option("-p", "--parallel-downloads", type=int, default=4, help="Number of parallel image downloads") -def download(pipeline, revision, outdir, compress, force, container, singularity_cache_only, parallel_downloads): +def download( + pipeline, + revision, + outdir, + compress, + force, + tower, + download_configuration, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, +): """ Download a pipeline, nf-core/configs and pipeline singularity images. Collects all files in a single archive and configures the downloaded workflow to use relative paths to the configs and singularity images. 
""" - dl = nf_core.download.DownloadWorkflow( - pipeline, revision, outdir, compress, force, container, singularity_cache_only, parallel_downloads + from nf_core.download import DownloadWorkflow + + dl = DownloadWorkflow( + pipeline, + revision, + outdir, + compress, + force, + tower, + download_configuration, + container_system, + container_library, + container_cache_utilisation, + container_cache_index, + parallel_downloads, ) dl.download_workflow() @@ -256,8 +321,9 @@ def licences(pipeline, json): Each of these is queried against the anaconda.org API to find the licence. Package name, version and licence is printed to the command line. """ + from nf_core.licences import WorkflowLicences - lic = nf_core.licences.WorkflowLicences(pipeline) + lic = WorkflowLicences(pipeline) lic.as_json = json try: stdout.print(lic.run_licences()) @@ -289,10 +355,10 @@ def create(name, description, author, version, no_git, force, outdir, template_y Uses the nf-core template to make a skeleton Nextflow pipeline with all required files, boilerplate code and best-practices. """ + from nf_core.create import PipelineCreate + try: - create_obj = nf_core.create.PipelineCreate( - name, description, author, version, no_git, force, outdir, template_yaml, plain - ) + create_obj = PipelineCreate(name, description, author, version, no_git, force, outdir, template_yaml, plain) create_obj.init_pipeline() except UserWarning as e: log.error(e) @@ -344,17 +410,19 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. See the documentation for details. """ + from nf_core.lint import run_linting + from nf_core.utils import is_pipeline_directory # Check if pipeline directory is a pipeline try: - nf_core.utils.is_pipeline_directory(dir) + is_pipeline_directory(dir) except UserWarning as e: log.error(e) sys.exit(1) # Run the lint tests! 
try: - lint_obj, module_lint_obj = nf_core.lint.run_linting( + lint_obj, module_lint_obj = run_linting( dir, release, fix, @@ -383,7 +451,7 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma "-g", "--git-remote", type=str, - default=nf_core.modules.modules_repo.NF_CORE_MODULES_REMOTE, + default=NF_CORE_MODULES_REMOTE, help="Remote git repo to fetch files from", ) @click.option("-b", "--branch", type=str, default=None, help="Branch of git repository hosting modules.") @@ -415,7 +483,7 @@ def modules(ctx, git_remote, branch, no_pull): "-g", "--git-remote", type=str, - default=nf_core.modules.modules_repo.NF_CORE_MODULES_REMOTE, + default=NF_CORE_MODULES_REMOTE, help="Remote git repo to fetch files from", ) @click.option("-b", "--branch", type=str, default=None, help="Branch of git repository hosting modules.") @@ -460,8 +528,10 @@ def remote(ctx, keywords, json): """ List modules in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. """ + from nf_core.modules import ModuleList + try: - module_list = nf_core.modules.ModuleList( + module_list = ModuleList( None, True, ctx.obj["modules_repo_url"], @@ -490,8 +560,10 @@ def local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin """ List modules installed locally in a pipeline """ + from nf_core.modules import ModuleList + try: - module_list = nf_core.modules.ModuleList( + module_list = ModuleList( dir, False, ctx.obj["modules_repo_url"], @@ -524,8 +596,10 @@ def install(ctx, tool, dir, prompt, force, sha): Fetches and installs module files from a remote repo e.g. nf-core/modules. """ + from nf_core.modules import ModuleInstall + try: - module_install = nf_core.modules.ModuleInstall( + module_install = ModuleInstall( dir, force, prompt, @@ -585,8 +659,10 @@ def update(ctx, tool, dir, force, prompt, sha, all, preview, save_diff, update_d Fetches and updates module files from a remote repo e.g. nf-core/modules. 
""" + from nf_core.modules import ModuleUpdate + try: - module_install = nf_core.modules.ModuleUpdate( + module_install = ModuleUpdate( dir, force, prompt, @@ -626,8 +702,10 @@ def patch(ctx, tool, dir, remove): Checks if a module has been modified locally and creates a patch file describing how the module has changed from the remote version """ + from nf_core.modules import ModulePatch + try: - module_patch = nf_core.modules.ModulePatch( + module_patch = ModulePatch( dir, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -657,8 +735,10 @@ def remove(ctx, dir, tool): """ Remove a module from a pipeline. """ + from nf_core.modules import ModuleRemove + try: - module_remove = nf_core.modules.ModuleRemove( + module_remove = ModuleRemove( dir, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -710,9 +790,11 @@ def create_module( elif no_meta: has_meta = False + from nf_core.modules import ModuleCreate + # Run function try: - module_create = nf_core.modules.ModuleCreate( + module_create = ModuleCreate( dir, tool, author, label, has_meta, force, conda_name, conda_package_version, empty_template ) module_create.create() @@ -739,8 +821,10 @@ def create_test_yml(ctx, tool, run_tests, output, force, no_prompts): Given the name of a module, runs the Nextflow test command and automatically generate the required `test.yml` file based on the output files. 
""" + from nf_core.modules import ModulesTestYmlBuilder + try: - meta_builder = nf_core.modules.ModulesTestYmlBuilder( + meta_builder = ModulesTestYmlBuilder( module_name=tool, run_tests=run_tests, test_yml_output_path=output, @@ -760,6 +844,14 @@ def create_test_yml(ctx, tool, run_tests, output, force, no_prompts): @click.pass_context @click.argument("tool", type=str, required=False, metavar=" or ") @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="") +@click.option( + "-r", + "--registry", + type=str, + metavar="", + default=None, + help="Registry to use for containers. If not specified it will use docker.registry value in the nextflow.config file", +) @click.option("-k", "--key", type=str, metavar="", multiple=True, help="Run only these lint tests") @click.option("-a", "--all", is_flag=True, help="Run on all modules") @click.option("-w", "--fail-warned", is_flag=True, help="Convert warn tests to failures") @@ -774,7 +866,7 @@ def create_test_yml(ctx, tool, run_tests, output, force, no_prompts): ) @click.option("--fix-version", is_flag=True, help="Fix the module version if a newer version is available") def lint( - ctx, tool, dir, key, all, fail_warned, local, passed, sort_by, fix_version + ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version ): # pylint: disable=redefined-outer-name """ Lint one or more modules in a directory. @@ -785,10 +877,14 @@ def lint( Test modules within a pipeline or a clone of the nf-core/modules repository. 
""" + from nf_core.modules import ModuleLint + from nf_core.modules.lint import ModuleLintException + try: - module_lint = nf_core.modules.ModuleLint( + module_lint = ModuleLint( dir, fail_warned=fail_warned, + registry=ctx.params["registry"], remote_url=ctx.obj["modules_repo_url"], branch=ctx.obj["modules_repo_branch"], no_pull=ctx.obj["modules_repo_no_pull"], @@ -796,6 +892,7 @@ def lint( ) module_lint.lint( module=tool, + registry=registry, key=key, all_modules=all, print_results=True, @@ -806,7 +903,7 @@ def lint( ) if len(module_lint.failed) > 0: sys.exit(1) - except nf_core.modules.lint.ModuleLintException as e: + except ModuleLintException as e: log.error(e) sys.exit(1) except (UserWarning, LookupError) as e: @@ -837,8 +934,10 @@ def info(ctx, tool, dir): will print this usage info. If not, usage from the remote modules repo will be shown. """ + from nf_core.modules import ModuleInfo + try: - module_info = nf_core.modules.ModuleInfo( + module_info = ModuleInfo( dir, tool, ctx.obj["modules_repo_url"], @@ -863,15 +962,18 @@ def bump_versions(ctx, tool, dir, all, show_all): Bump versions for one or more modules in a clone of the nf-core/modules repo. """ + from nf_core.modules.bump_versions import ModuleVersionBumper + from nf_core.modules.modules_utils import ModuleException + try: - version_bumper = nf_core.modules.bump_versions.ModuleVersionBumper( + version_bumper = ModuleVersionBumper( dir, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], ) version_bumper.bump_versions(module=tool, all_modules=all, show_uptodate=show_all) - except nf_core.modules.modules_utils.ModuleException as e: + except ModuleException as e: log.error(e) sys.exit(1) except (UserWarning, LookupError) as e: @@ -891,8 +993,10 @@ def test_module(ctx, tool, no_prompts, pytest_args): Given the name of a module, runs the Nextflow test command. 
""" + from nf_core.modules import ModulesTest + try: - meta_builder = nf_core.modules.ModulesTest(tool, no_prompts, pytest_args) + meta_builder = ModulesTest(tool, no_prompts, pytest_args) meta_builder.run() except (UserWarning, LookupError) as e: log.critical(e) @@ -916,10 +1020,11 @@ def create_subworkflow(ctx, subworkflow, dir, author, force): If the specified directory is a clone of nf-core/modules, it creates or modifies files in 'subworkflows/', 'tests/subworkflows' and 'tests/config/pytest_modules.yml' """ + from nf_core.subworkflows import SubworkflowCreate # Run function try: - subworkflow_create = nf_core.subworkflows.SubworkflowCreate(dir, subworkflow, author, force) + subworkflow_create = SubworkflowCreate(dir, subworkflow, author, force) subworkflow_create.create() except UserWarning as e: log.critical(e) @@ -944,8 +1049,10 @@ def create_test_yml(ctx, subworkflow, run_tests, output, force, no_prompts): Given the name of a module, runs the Nextflow test command and automatically generate the required `test.yml` file based on the output files. """ + from nf_core.subworkflows import SubworkflowTestYmlBuilder + try: - meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + meta_builder = SubworkflowTestYmlBuilder( subworkflow=subworkflow, run_tests=run_tests, test_yml_output_path=output, @@ -979,15 +1086,18 @@ def remote(ctx, keywords, json): """ List subworkflows in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. 
""" + from nf_core.subworkflows import SubworkflowList + try: - subworkflows_list = nf_core.subworkflows.SubworkflowList( + subworkflow_list = SubworkflowList( None, True, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], ) - stdout.print(subworkflows_list.list_subworkflows(keywords, json)) + + stdout.print(subworkflow_list.list_components(keywords, json)) except (UserWarning, LookupError) as e: log.critical(e) sys.exit(1) @@ -1009,15 +1119,17 @@ def local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin """ List subworkflows installed locally in a pipeline """ + from nf_core.subworkflows import SubworkflowList + try: - subworkflows_list = nf_core.subworkflows.SubworkflowList( + subworkflow_list = SubworkflowList( dir, False, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], ) - stdout.print(subworkflows_list.list_subworkflows(keywords, json)) + stdout.print(subworkflow_list.list_components(keywords, json)) except (UserWarning, LookupError) as e: log.error(e) sys.exit(1) @@ -1046,8 +1158,10 @@ def info(ctx, tool, dir): will print this usage info. If not, usage from the remote subworkflows repo will be shown. """ + from nf_core.subworkflows import SubworkflowInfo + try: - subworkflow_info = nf_core.subworkflows.SubworkflowInfo( + subworkflow_info = SubworkflowInfo( dir, tool, ctx.obj["modules_repo_url"], @@ -1072,8 +1186,10 @@ def test_subworkflow(ctx, subworkflow, no_prompts, pytest_args): Given the name of a subworkflow, runs the Nextflow test command. 
""" + from nf_core.subworkflows import SubworkflowsTest + try: - meta_builder = nf_core.subworkflows.SubworkflowsTest(subworkflow, no_prompts, pytest_args) + meta_builder = SubworkflowsTest(subworkflow, no_prompts, pytest_args) meta_builder.run() except (UserWarning, LookupError) as e: log.critical(e) @@ -1102,8 +1218,10 @@ def install(ctx, subworkflow, dir, prompt, force, sha): Fetches and installs subworkflow files from a remote repo e.g. nf-core/modules. """ + from nf_core.subworkflows import SubworkflowInstall + try: - subworkflow_install = nf_core.subworkflows.SubworkflowInstall( + subworkflow_install = SubworkflowInstall( dir, force, prompt, @@ -1121,70 +1239,6 @@ def install(ctx, subworkflow, dir, prompt, force, sha): sys.exit(1) -# nf-core subworkflows list subcommands -@subworkflows.group() -@click.pass_context -def list(ctx): - """ - List modules in a local pipeline or remote repository. - """ - pass - - -# nf-core subworkflows list remote -@list.command() -@click.pass_context -@click.argument("keywords", required=False, nargs=-1, metavar="") -@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") -def remote(ctx, keywords, json): - """ - List subworkflows in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/]. - """ - try: - subworkflow_list = nf_core.subworkflows.SubworkflowList( - None, - True, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - - stdout.print(subworkflow_list.list_components(keywords, json)) - except (UserWarning, LookupError) as e: - log.critical(e) - sys.exit(1) - - -# nf-core subworkflows list local -@list.command() -@click.pass_context -@click.argument("keywords", required=False, nargs=-1, metavar="") -@click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout") -@click.option( - "-d", - "--dir", - type=click.Path(exists=True), - default=".", - help=r"Pipeline directory. 
[dim]\[default: Current working directory][/]", -) -def local(ctx, keywords, json, dir): # pylint: disable=redefined-builtin - """ - List subworkflows installed locally in a pipeline - """ - try: - subworkflow_list = nf_core.subworkflows.SubworkflowList( - dir, - False, - ctx.obj["modules_repo_url"], - ctx.obj["modules_repo_branch"], - ctx.obj["modules_repo_no_pull"], - ) - stdout.print(subworkflow_list.list_components(keywords, json)) - except (UserWarning, LookupError) as e: - log.error(e) - sys.exit(1) - - # nf-core subworkflows remove @subworkflows.command() @click.pass_context @@ -1200,8 +1254,10 @@ def remove(ctx, dir, subworkflow): """ Remove a subworkflow from a pipeline. """ + from nf_core.subworkflows import SubworkflowRemove + try: - module_remove = nf_core.subworkflows.SubworkflowRemove( + module_remove = SubworkflowRemove( dir, ctx.obj["modules_repo_url"], ctx.obj["modules_repo_branch"], @@ -1256,8 +1312,10 @@ def update(ctx, subworkflow, dir, force, prompt, sha, all, preview, save_diff, u Fetches and updates subworkflow files from a remote repo e.g. nf-core/modules. """ + from nf_core.subworkflows import SubworkflowUpdate + try: - subworkflow_install = nf_core.subworkflows.SubworkflowUpdate( + subworkflow_install = SubworkflowUpdate( dir, force, prompt, @@ -1304,7 +1362,9 @@ def validate(pipeline, params): This command takes such a file and validates it against the pipeline schema, checking whether all schema rules are satisfied. """ - schema_obj = nf_core.schema.PipelineSchema() + from nf_core.schema import PipelineSchema + + schema_obj = PipelineSchema() try: schema_obj.get_schema_path(pipeline) # Load and check schema @@ -1348,8 +1408,10 @@ def build(dir, no_prompts, web_only, url): https://nf-co.re website where you can annotate and organise parameters. Listens for this to be completed and saves the updated schema. 
""" + from nf_core.schema import PipelineSchema + try: - schema_obj = nf_core.schema.PipelineSchema() + schema_obj = PipelineSchema() if schema_obj.build_schema(dir, no_prompts, web_only, url) is False: sys.exit(1) except (UserWarning, AssertionError) as e: @@ -1374,7 +1436,9 @@ def lint(schema_path): If no schema path is provided, "nextflow_schema.json" will be used (if it exists). """ - schema_obj = nf_core.schema.PipelineSchema() + from nf_core.schema import PipelineSchema + + schema_obj = PipelineSchema() try: schema_obj.get_schema_path(schema_path) schema_obj.load_lint_schema() @@ -1416,7 +1480,9 @@ def docs(schema_path, output, format, force, columns): log.error("Could not find 'nextflow_schema.json' in current directory. Please specify a path.") sys.exit(1) - schema_obj = nf_core.schema.PipelineSchema() + from nf_core.schema import PipelineSchema + + schema_obj = PipelineSchema() # Assume we're in a pipeline dir root if schema path not set schema_obj.get_schema_path(schema_path) schema_obj.load_schema() @@ -1449,19 +1515,22 @@ def bump_version(new_version, dir, nextflow): As well as the pipeline version, you can also change the required version of Nextflow. 
""" + from nf_core.bump_version import bump_nextflow_version, bump_pipeline_version + from nf_core.utils import Pipeline, is_pipeline_directory + try: # Check if pipeline directory contains necessary files - nf_core.utils.is_pipeline_directory(dir) + is_pipeline_directory(dir) # Make a pipeline object and load config etc - pipeline_obj = nf_core.utils.Pipeline(dir) + pipeline_obj = Pipeline(dir) pipeline_obj._load() # Bump the pipeline version number if not nextflow: - nf_core.bump_version.bump_pipeline_version(pipeline_obj, new_version) + bump_pipeline_version(pipeline_obj, new_version) else: - nf_core.bump_version.bump_nextflow_version(pipeline_obj, new_version) + bump_nextflow_version(pipeline_obj, new_version) except UserWarning as e: log.error(e) sys.exit(1) @@ -1494,14 +1563,17 @@ def sync(dir, from_branch, pull_request, github_repository, username, template_y the pipeline. It is run automatically for all pipelines when ever a new release of [link=https://github.com/nf-core/tools]nf-core/tools[/link] (and the included template) is made. 
""" + from nf_core.sync import PipelineSync, PullRequestException, SyncException + from nf_core.utils import is_pipeline_directory + # Check if pipeline directory contains necessary files - nf_core.utils.is_pipeline_directory(dir) + is_pipeline_directory(dir) # Sync the given pipeline dir - sync_obj = nf_core.sync.PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml) + sync_obj = PipelineSync(dir, from_branch, pull_request, github_repository, username, template_yaml) try: sync_obj.sync() - except (nf_core.sync.SyncException, nf_core.sync.PullRequestException) as e: + except (SyncException, PullRequestException) as e: log.error(e) sys.exit(1) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 129016fa38..b462ee1377 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -3,8 +3,8 @@ """ import logging -import os import re +from pathlib import Path import rich.console @@ -44,6 +44,17 @@ def bump_pipeline_version(pipeline_obj, new_version): ) ], ) + # multiqc_config.yaml + update_file_version( + Path("assets", "multiqc_config.yml"), + pipeline_obj, + [ + ( + rf"{re.escape(current_version)}", + f"{new_version}", + ) + ], + ) def bump_nextflow_version(pipeline_obj, new_version): @@ -77,7 +88,7 @@ def bump_nextflow_version(pipeline_obj, new_version): # .github/workflows/ci.yml - Nextflow version matrix update_file_version( - os.path.join(".github", "workflows", "ci.yml"), + Path(".github", "workflows", "ci.yml"), pipeline_obj, [ ( diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py index c2b9abf569..37e275aea8 100644 --- a/nf_core/components/components_test.py +++ b/nf_core/components/components_test.py @@ -7,6 +7,7 @@ import pytest import questionary import rich +from git import InvalidGitRepositoryError, Repo import nf_core.modules.modules_utils import nf_core.utils @@ -179,6 +180,14 @@ def _run_pytests(self): console = rich.console.Console() 
console.rule(self.component_name, style="black") + # Check uncommitted changed + try: + repo = Repo(self.dir) + if repo.is_dirty(): + log.warning("You have uncommitted changes. Make sure to commit last changes before running the tests.") + except InvalidGitRepositoryError: + pass + # Set pytest arguments tag = self.component_name if self.component_type == "subworkflows": diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 5f8a2129c5..6fc6e03544 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -306,16 +306,6 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr # Update linked components self.update_linked_components(modules_to_update, subworkflows_to_update, updated) self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) - else: - # Don't save to a file, just iteratively update the variable - self.modules_json.update( - self.component_type, - modules_repo, - component, - version, - installed_by=None, - write_file=False, - ) if self.save_diff_fn: # Write the modules.json diff to the file diff --git a/nf_core/create.py b/nf_core/create.py index 9e3b38102f..089469f04c 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -144,7 +144,10 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa skip_paths = [] if param_dict["branded"] else ["branded"] for t_area in template_areas: - if t_area in template_yaml.get("skip", []): + areas_to_skip = template_yaml.get("skip", []) + if isinstance(areas_to_skip, str): + areas_to_skip = [areas_to_skip] + if t_area in areas_to_skip: if template_areas[t_area]["file"]: skip_paths.append(t_area) param_dict[t_area] = False @@ -352,6 +355,7 @@ def render_template(self): if self.template_yaml: with open(self.outdir / "pipeline_template.yml", "w") as fh: yaml.safe_dump(self.template_yaml, fh) + run_prettier_on_file(self.outdir / "pipeline_template.yml") def 
update_nextflow_schema(self): """ @@ -403,6 +407,12 @@ def fix_linting(self): ".github/workflows/awstest.yml", ".github/workflows/awsfulltest.yml", ], + "files_unchanged": [ + "CODE_OF_CONDUCT.md", + f"assets/nf-core-{short_name}_logo_light.png", + f"docs/images/nf-core-{short_name}_logo_light.png", + f"docs/images/nf-core-{short_name}_logo_dark.png", + ], "nextflow_config": [ "manifest.name", "manifest.homePage", @@ -415,9 +425,26 @@ def fix_linting(self): lint_config["files_exist"].extend( [ ".github/ISSUE_TEMPLATE/bug_report.yml", + ".github/ISSUE_TEMPLATE/feature_request.yml", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/CONTRIBUTING.md", + ".github/.dockstore.yml", + ".gitignore", + ] + ) + lint_config["files_unchanged"].extend( + [ + ".github/ISSUE_TEMPLATE/bug_report.yml", + ".github/ISSUE_TEMPLATE/config.yml", + ".github/ISSUE_TEMPLATE/feature_request.yml", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/workflows/branch.yml", + ".github/workflows/linting_comment.yml", + ".github/workflows/linting.yml", + ".github/CONTRIBUTING.md", + ".github/.dockstore.yml", ] ) - lint_config["files_unchanged"] = [".github/ISSUE_TEMPLATE/bug_report.yml"] # Add CI specific configurations if not self.template_params["ci"]: @@ -442,10 +469,18 @@ def fix_linting(self): ] ) + # Add igenomes specific configurations + if not self.template_params["igenomes"]: + lint_config["files_exist"].extend(["conf/igenomes.config"]) + # Add github badges specific configurations if not self.template_params["github_badges"] or not self.template_params["github"]: lint_config["readme"] = ["nextflow_badge"] + # If the pipeline is unbranded + if not self.template_params["branded"]: + lint_config["files_unchanged"].extend([".github/ISSUE_TEMPLATE/bug_report.yml"]) + # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) nf_core_yml["lint"] = lint_config @@ -461,7 +496,7 @@ def make_pipeline_logo(self): log.debug(f"Fetching 
logo from {logo_url}") email_logo_path = self.outdir / "assets" / f"{self.template_params['name_noslash']}_logo_light.png" - self.download_pipeline_logo(f"{logo_url}&w=400", email_logo_path) + self.download_pipeline_logo(f"{logo_url}?w=600&theme=light", email_logo_path) for theme in ["dark", "light"]: readme_logo_url = f"{logo_url}?w=600&theme={theme}" readme_logo_path = ( diff --git a/nf_core/download.py b/nf_core/download.py index cd36c65c4a..f049b41ab5 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -9,20 +9,29 @@ import re import shutil import subprocess -import sys import tarfile import textwrap +from datetime import datetime from zipfile import ZipFile +import git import questionary import requests import requests_cache import rich import rich.progress +from git.exc import GitCommandError, InvalidGitRepositoryError +from pkg_resources import parse_version as VersionParser import nf_core import nf_core.list import nf_core.utils +from nf_core.synced_repo import RemoteProgressbar, SyncedRepo +from nf_core.utils import ( + NFCORE_CACHE_DIR, + NFCORE_DIR, + SingularityCacheFilePathValidator, +) log = logging.getLogger(__name__) stderr = rich.console.Console( @@ -30,6 +39,11 @@ ) +class DownloadError(RuntimeError): + """A custom exception that is raised when nf-core download encounters a problem that we already took into consideration. + In this case, we do not want to print the traceback, but give the user some concise, helpful feedback instead.""" + + class DownloadProgress(rich.progress.Progress): """Custom Progress bar class, allowing us to have two progress bars with different columns / layouts. @@ -71,8 +85,9 @@ class DownloadWorkflow: Args: pipeline (str): A nf-core pipeline name. - revision (str): The workflow revision to download, like `1.0`. Defaults to None. - singularity (bool): Flag, if the Singularity container should be downloaded as well. Defaults to False. + revision (List[str]): The workflow revision to download, like `1.0`. 
Defaults to None. + container (bool): Flag, if the Singularity container should be downloaded as well. Defaults to False. + tower (bool): Flag, to customize the download for Nextflow Tower (convert to git bare repo). Defaults to False. outdir (str): Path to the local download directory. Defaults to None. """ @@ -83,26 +98,52 @@ def __init__( outdir=None, compress_type=None, force=False, - container=None, - singularity_cache_only=False, + tower=False, + download_configuration=None, + container_system=None, + container_library=None, + container_cache_utilisation=None, + container_cache_index=None, parallel_downloads=4, ): self.pipeline = pipeline - self.revision = revision + if isinstance(revision, str): + self.revision = [revision] + elif isinstance(revision, tuple): + self.revision = [*revision] + else: + self.revision = [] self.outdir = outdir self.output_filename = None self.compress_type = compress_type self.force = force - self.container = container - self.singularity_cache_only = singularity_cache_only + self.tower = tower + # if flag is not specified, do not assume deliberate choice and prompt config inclusion interactively. + # this implies that non-interactive "no" choice is only possible implicitly (e.g. with --tower or if prompt is suppressed by !stderr.is_interactive). + # only alternative would have been to make it a parameter with argument, e.g. -d="yes" or -d="no". + self.include_configs = True if download_configuration else False if bool(tower) else None + # Specifying a cache index or container library implies that containers should be downloaded. 
+ self.container_system = "singularity" if container_cache_index or bool(container_library) else container_system + # Manually specified container library (registry) + if isinstance(container_library, str) and bool(len(container_library)): + self.container_library = [container_library] + elif isinstance(container_library, tuple) and bool(len(container_library)): + self.container_library = [*container_library] + else: + self.container_library = ["quay.io"] + # if a container_cache_index is given, use the file and overrule choice. + self.container_cache_utilisation = "remote" if container_cache_index else container_cache_utilisation + self.container_cache_index = container_cache_index + # allows to specify a container library / registry or a respective mirror to download images from self.parallel_downloads = parallel_downloads self.wf_revisions = {} self.wf_branches = {} - self.wf_sha = None - self.wf_download_url = None + self.wf_sha = {} + self.wf_download_url = {} self.nf_config = {} self.containers = [] + self.containers_remote = [] # stores the remote images provided in the file. # Fetch remote workflows self.wfs = nf_core.list.Workflows() @@ -119,72 +160,149 @@ def download_workflow(self): ) self.prompt_revision() self.get_revision_hash() - self.prompt_container_download() - self.prompt_use_singularity_cachedir() - self.prompt_singularity_cachedir_only() - self.prompt_compression_type() + # Inclusion of configs is unnecessary for Tower. + if not self.tower and self.include_configs is None: + self.prompt_config_inclusion() + # If a remote cache is specified, it is safe to assume images should be downloaded. + if not self.container_cache_utilisation == "remote": + self.prompt_container_download() + else: + self.container_system = "singularity" + self.prompt_singularity_cachedir_creation() + self.prompt_singularity_cachedir_utilization() + self.prompt_singularity_cachedir_remote() + # Nothing meaningful to compress here. 
+ if not self.tower: + self.prompt_compression_type() except AssertionError as e: - log.critical(e) - sys.exit(1) - - summary_log = [f"Pipeline revision: '{self.revision}'", f"Pull containers: '{self.container}'"] - if self.container == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None: - summary_log.append(f"Using [blue]$NXF_SINGULARITY_CACHEDIR[/]': {os.environ['NXF_SINGULARITY_CACHEDIR']}") + raise DownloadError(e) from e + + summary_log = [ + f"Pipeline revision: '{', '.join(self.revision) if len(self.revision) < 5 else self.revision[0]+',['+str(len(self.revision)-2)+' more revisions],'+self.revision[-1]}'", + f"Use containers: '{self.container_system}'", + ] + if self.container_system: + summary_log.append(f"Container library: '{', '.join(self.container_library)}'") + if self.container_system == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None: + summary_log.append(f"Using [blue]$NXF_SINGULARITY_CACHEDIR[/]': {os.environ['NXF_SINGULARITY_CACHEDIR']}'") + if self.containers_remote: + summary_log.append( + f"Successfully read {len(self.containers_remote)} containers from the remote '$NXF_SINGULARITY_CACHEDIR' contents." + ) # Set an output filename now that we have the outdir - if self.compress_type is not None: + if self.tower: + self.output_filename = f"{self.outdir}.git" + summary_log.append(f"Output file: '{self.output_filename}'") + elif self.compress_type is not None: self.output_filename = f"{self.outdir}.{self.compress_type}" summary_log.append(f"Output file: '{self.output_filename}'") else: summary_log.append(f"Output directory: '{self.outdir}'") + if not self.tower: + # Only show entry, if option was prompted. 
+ summary_log.append(f"Include default institutional configuration: '{self.include_configs}'") + else: + summary_log.append(f"Enabled for seqeralabs® Nextflow Tower: '{self.tower}'") + # Check that the outdir doesn't already exist if os.path.exists(self.outdir): if not self.force: - log.error(f"Output directory '{self.outdir}' already exists (use [red]--force[/] to overwrite)") - sys.exit(1) + raise DownloadError( + f"Output directory '{self.outdir}' already exists (use [red]--force[/] to overwrite)" + ) log.warning(f"Deleting existing output directory: '{self.outdir}'") shutil.rmtree(self.outdir) # Check that compressed output file doesn't already exist if self.output_filename and os.path.exists(self.output_filename): if not self.force: - log.error(f"Output file '{self.output_filename}' already exists (use [red]--force[/] to overwrite)") - sys.exit(1) + raise DownloadError( + f"Output file '{self.output_filename}' already exists (use [red]--force[/] to overwrite)" + ) log.warning(f"Deleting existing output file: '{self.output_filename}'") os.remove(self.output_filename) # Summary log log.info("Saving '{}'\n {}".format(self.pipeline, "\n ".join(summary_log))) - # Download the pipeline files + # Perform the actual download + if self.tower: + self.download_workflow_tower() + else: + self.download_workflow_static() + + def download_workflow_static(self): + """Downloads a nf-core workflow from GitHub to the local file system in a self-contained manner.""" + + # Download the centralised configs first + if self.include_configs: + log.info("Downloading centralised configs from GitHub") + self.download_configs() + + # Download the pipeline files for each selected revision log.info("Downloading workflow files from GitHub") - self.download_wf_files() - # Download the centralised configs - log.info("Downloading centralised configs from GitHub") - self.download_configs() - try: - self.wf_use_local_configs() - except FileNotFoundError as e: - log.error("Error editing pipeline 
config file to use local configs!") - log.critical(e) - sys.exit(1) - - # Download the singularity images - if self.container == "singularity": - self.find_container_images() - try: - self.get_singularity_images() - except OSError as e: - log.critical(f"[red]{e}[/]") - sys.exit(1) + for item in zip(self.revision, self.wf_sha.values(), self.wf_download_url.values()): + revision_dirname = self.download_wf_files(revision=item[0], wf_sha=item[1], download_url=item[2]) + + if self.include_configs: + try: + self.wf_use_local_configs(revision_dirname) + except FileNotFoundError as e: + raise DownloadError("Error editing pipeline config file to use local configs!") from e + + # Collect all required singularity images + if self.container_system == "singularity": + self.find_container_images(os.path.join(self.outdir, revision_dirname)) + + try: + self.get_singularity_images(current_revision=item[0]) + except OSError as e: + raise DownloadError(f"[red]{e}[/]") from e # Compress into an archive if self.compress_type is not None: - log.info("Compressing download..") + log.info("Compressing output into archive") self.compress_download() + def download_workflow_tower(self, location=None): + """Create a bare-cloned git repository of the workflow, so it can be launched with `tw launch` as file:/ pipeline""" + + log.info("Collecting workflow from GitHub") + + self.workflow_repo = WorkflowRepo( + remote_url=f"https://github.com/{self.pipeline}.git", + revision=self.revision if self.revision else None, + commit=self.wf_sha.values() if bool(self.wf_sha) else None, + location=location if location else None, # manual location is required for the tests to work + in_cache=False, + ) + + # Remove tags for those revisions that had not been selected + self.workflow_repo.tidy_tags_and_branches() + + # create a bare clone of the modified repository needed for Tower + self.workflow_repo.bare_clone(os.path.join(self.outdir, self.output_filename)) + + # extract the required containers + if 
self.container_system == "singularity": + for revision, commit in self.wf_sha.items(): + # Checkout the repo in the current revision + self.workflow_repo.checkout(commit) + # Collect all required singularity images + self.find_container_images(self.workflow_repo.access()) + + try: + self.get_singularity_images(current_revision=revision) + except OSError as e: + raise DownloadError(f"[red]{e}[/]") from e + + # Justify why compression is skipped for Tower downloads (Prompt is not shown, but CLI argument could have been set) + if self.compress_type is not None: + log.info("Compression choice is ignored for Tower downloads since nothing can be reasonably compressed.") + def prompt_pipeline_name(self): """Prompt for the pipeline name if not set with a flag""" @@ -193,57 +311,105 @@ def prompt_pipeline_name(self): self.pipeline = nf_core.utils.prompt_remote_pipeline_name(self.wfs) def prompt_revision(self): - """Prompt for pipeline revision / branch""" - # Prompt user for revision tag if '--revision' was not set - if self.revision is None: - self.revision = nf_core.utils.prompt_pipeline_release_branch(self.wf_revisions, self.wf_branches) + """ + Prompt for pipeline revision / branch + Prompt user for revision tag if '--revision' was not set + If --tower is specified, allow to select multiple revisions + Also the static download allows for multiple revisions, but + we do not prompt this option interactively. + """ + if not bool(self.revision): + (choice, tag_set) = nf_core.utils.prompt_pipeline_release_branch( + self.wf_revisions, self.wf_branches, multiple=self.tower + ) + """ + The checkbox() prompt unfortunately does not support passing a Validator, + so a user who keeps pressing Enter will flounder past the selection without choice. + + bool(choice), bool(tag_set): + ############################# + True, True: A choice was made and revisions were available. + False, True: No selection was made, but revisions were available -> defaults to all available. 
+ False, False: No selection was made because no revisions were available -> raise AssertionError. + True, False: Congratulations, you found a bug! That combo shouldn't happen. + """ + + if bool(choice): + # have to make sure that self.revision is a list of strings, regardless if choice is str or list of strings. + self.revision.append(choice) if isinstance(choice, str) else self.revision.extend(choice) + else: + if bool(tag_set): + self.revision = tag_set + log.info("No particular revision was selected, all available will be downloaded.") + else: + raise AssertionError(f"No revisions of {self.pipeline} available for download.") def get_revision_hash(self): """Find specified revision / branch hash""" - # Branch - if self.revision in self.wf_branches.keys(): - self.wf_sha = self.wf_branches[self.revision] + for revision in self.revision: # revision is a list of strings, but may be of length 1 + # Branch + if revision in self.wf_branches.keys(): + self.wf_sha = {**self.wf_sha, revision: self.wf_branches[revision]} - # Revision - else: - for r in self.wf_revisions: - if r["tag_name"] == self.revision: - self.wf_sha = r["tag_sha"] - break - - # Can't find the revisions or branch - throw an error + # Revision else: - log.info( - "Available {} revisions: '{}'".format( - self.pipeline, "', '".join([r["tag_name"] for r in self.wf_revisions]) + for r in self.wf_revisions: + if r["tag_name"] == revision: + self.wf_sha = {**self.wf_sha, revision: r["tag_sha"]} + break + + # Can't find the revisions or branch - throw an error + else: + log.info( + "Available {} revisions: '{}'".format( + self.pipeline, "', '".join([r["tag_name"] for r in self.wf_revisions]) + ) ) - ) - log.info("Available {} branches: '{}'".format(self.pipeline, "', '".join(self.wf_branches.keys()))) - raise AssertionError(f"Not able to find revision / branch '{self.revision}' for {self.pipeline}") + log.info("Available {} branches: '{}'".format(self.pipeline, "', '".join(self.wf_branches.keys()))) + raise 
AssertionError(f"Not able to find revision / branch '{revision}' for {self.pipeline}") # Set the outdir if not self.outdir: - self.outdir = f"{self.pipeline.replace('/', '-').lower()}-{self.revision}" - - # Set the download URL and return - self.wf_download_url = f"https://github.com/{self.pipeline}/archive/{self.wf_sha}.zip" + if len(self.wf_sha) > 1: + self.outdir = f"{self.pipeline.replace('/', '-').lower()}_{datetime.now().strftime('%Y-%m-%d_%H-%M')}" + else: + self.outdir = f"{self.pipeline.replace('/', '-').lower()}_{self.revision[0]}" + + if not self.tower: + for revision, wf_sha in self.wf_sha.items(): + # Set the download URL and return - only applicable for classic downloads + self.wf_download_url = { + **self.wf_download_url, + revision: f"https://github.com/{self.pipeline}/archive/{wf_sha}.zip", + } + + def prompt_config_inclusion(self): + """Prompt for inclusion of institutional configurations""" + if stderr.is_interactive: # Use rich auto-detection of interactive shells + self.include_configs = questionary.confirm( + "Include the nf-core's default institutional configuration files into the download?", + style=nf_core.utils.nfcore_question_style, + ).ask() + else: + self.include_configs = False + # do not include by default. 
def prompt_container_download(self): """Prompt whether to download container images or not""" - if self.container is None: + if self.container_system is None and stderr.is_interactive and not self.tower: stderr.print("\nIn addition to the pipeline code, this tool can download software containers.") - self.container = questionary.select( + self.container_system = questionary.select( "Download software container images:", choices=["none", "singularity"], style=nf_core.utils.nfcore_question_style, ).unsafe_ask() - def prompt_use_singularity_cachedir(self): + def prompt_singularity_cachedir_creation(self): """Prompt about using $NXF_SINGULARITY_CACHEDIR if not already set""" if ( - self.container == "singularity" + self.container_system == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is None and stderr.is_interactive # Use rich auto-detection of interactive shells ): @@ -254,6 +420,8 @@ def prompt_use_singularity_cachedir(self): if rich.prompt.Confirm.ask( "[blue bold]?[/] [bold]Define [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] for a shared Singularity image download folder?[/]" ): + if not self.container_cache_index: + self.container_cache_utilisation = "amend" # retain "remote" choice. # Prompt user for a cache directory path cachedir_path = None while cachedir_path is None: @@ -270,53 +438,129 @@ def prompt_use_singularity_cachedir(self): if cachedir_path: os.environ["NXF_SINGULARITY_CACHEDIR"] = cachedir_path - # Ask if user wants this set in their .bashrc - bashrc_path = os.path.expanduser("~/.bashrc") - if not os.path.isfile(bashrc_path): - bashrc_path = os.path.expanduser("~/.bash_profile") - if not os.path.isfile(bashrc_path): - bashrc_path = False - if bashrc_path: + """ + Optionally, create a permanent entry for the NXF_SINGULARITY_CACHEDIR in the terminal profile. + Currently supports bash and zsh. + ToDo: "sh", "dash", "ash","csh", "tcsh", "ksh", "fish", "cmd", "powershell", "pwsh"? 
+ """ + + if os.getenv("SHELL", "") == "/bin/bash": + shellprofile_path = os.path.expanduser("~/.bash_profile") + if not os.path.isfile(shellprofile_path): + shellprofile_path = os.path.expanduser("~/.bashrc") + if not os.path.isfile(shellprofile_path): + shellprofile_path = False + elif os.getenv("SHELL", "") == "/bin/zsh": + shellprofile_path = os.path.expanduser("~/.zprofile") + if not os.path.isfile(shellprofile_path): + shellprofile_path = os.path.expanduser("~/.zshenv") + if not os.path.isfile(shellprofile_path): + shellprofile_path = False + else: + shellprofile_path = os.path.expanduser("~/.profile") + if not os.path.isfile(shellprofile_path): + shellprofile_path = False + + if shellprofile_path: stderr.print( - f"\nSo that [blue]$NXF_SINGULARITY_CACHEDIR[/] is always defined, you can add it to your [blue not bold]~/{os.path.basename(bashrc_path)}[/] file ." - "This will then be autmoatically set every time you open a new terminal. We can add the following line to this file for you: \n" + f"\nSo that [blue]$NXF_SINGULARITY_CACHEDIR[/] is always defined, you can add it to your [blue not bold]~/{os.path.basename(shellprofile_path)}[/] file ." + "This will then be automatically set every time you open a new terminal. 
We can add the following line to this file for you: \n" f'[blue]export NXF_SINGULARITY_CACHEDIR="{cachedir_path}"[/]' ) append_to_file = rich.prompt.Confirm.ask( - f"[blue bold]?[/] [bold]Add to [blue not bold]~/{os.path.basename(bashrc_path)}[/] ?[/]" + f"[blue bold]?[/] [bold]Add to [blue not bold]~/{os.path.basename(shellprofile_path)}[/] ?[/]" ) if append_to_file: - with open(os.path.expanduser(bashrc_path), "a") as f: + with open(os.path.expanduser(shellprofile_path), "a") as f: f.write( "\n\n#######################################\n" f"## Added by `nf-core download` v{nf_core.__version__} ##\n" + f'export NXF_SINGULARITY_CACHEDIR="{cachedir_path}"' + "\n#######################################\n" ) - log.info(f"Successfully wrote to [blue]{bashrc_path}[/]") + log.info(f"Successfully wrote to [blue]{shellprofile_path}[/]") log.warning( "You will need reload your terminal after the download completes for this to take effect." ) - def prompt_singularity_cachedir_only(self): + def prompt_singularity_cachedir_utilization(self): """Ask if we should *only* use $NXF_SINGULARITY_CACHEDIR without copying into target""" if ( - self.singularity_cache_only is None - and self.container == "singularity" + self.container_cache_utilisation is None # no choice regarding singularity cache has been made. + and self.container_system == "singularity" and os.environ.get("NXF_SINGULARITY_CACHEDIR") is not None + and stderr.is_interactive ): stderr.print( - "\nIf you are working on the same system where you will run Nextflow, you can leave the downloaded images in the " - "[blue not bold]$NXF_SINGULARITY_CACHEDIR[/] folder, Nextflow will automatically find them. " + "\nIf you are working on the same system where you will run Nextflow, you can amend the downloaded images to the ones in the" + "[blue not bold]$NXF_SINGULARITY_CACHEDIR[/] folder, Nextflow will automatically find them." 
"However if you will transfer the downloaded files to a different system then they should be copied to the target folder." ) - self.singularity_cache_only = rich.prompt.Confirm.ask( - "[blue bold]?[/] [bold]Copy singularity images from [blue not bold]$NXF_SINGULARITY_CACHEDIR[/] to the target folder?[/]" - ) + self.container_cache_utilisation = questionary.select( + "Copy singularity images from $NXF_SINGULARITY_CACHEDIR to the target folder or amend new images to the cache?", + choices=["amend", "copy"], + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() - # Sanity check, for when passed as a cli flag - if self.singularity_cache_only and self.container != "singularity": - raise AssertionError("Command has '--singularity-cache-only' set, but '--container' is not 'singularity'") + def prompt_singularity_cachedir_remote(self): + """Prompt about the index of a remote $NXF_SINGULARITY_CACHEDIR""" + if ( + self.container_system == "singularity" + and self.container_cache_utilisation == "remote" + and self.container_cache_index is None + and stderr.is_interactive # Use rich auto-detection of interactive shells + ): + # Prompt user for a file listing the contents of the remote cache directory + cachedir_index = None + while cachedir_index is None: + prompt_cachedir_index = questionary.path( + "Specify a list of the container images that are already present on the remote system:", + validate=SingularityCacheFilePathValidator, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + cachedir_index = os.path.abspath(os.path.expanduser(prompt_cachedir_index)) + if prompt_cachedir_index == "": + log.error("Will disregard contents of a remote [blue]$NXF_SINGULARITY_CACHEDIR[/]") + self.container_cache_index = None + self.container_cache_utilisation = "copy" + elif not os.access(cachedir_index, os.R_OK): + log.error(f"'{cachedir_index}' is not a readable file.") + cachedir_index = None + if cachedir_index: + self.container_cache_index = cachedir_index + # in 
any case read the remote containers, even if no prompt was shown. + self.read_remote_containers() + + def read_remote_containers(self): + """Reads the file specified as index for the remote Singularity cache dir""" + if ( + self.container_system == "singularity" + and self.container_cache_utilisation == "remote" + and self.container_cache_index is not None + ): + n_total_images = 0 + try: + with open(self.container_cache_index) as indexfile: + for line in indexfile.readlines(): + match = re.search(r"([^\/\\]+\.img)", line, re.S) + if match: + n_total_images += 1 + self.containers_remote.append(match.group(0)) + if n_total_images == 0: + raise LookupError("Could not find valid container names in the index file.") + self.containers_remote = sorted(list(set(self.containers_remote))) + except (FileNotFoundError, LookupError) as e: + log.error(f"[red]Issue with reading the specified remote $NXF_SINGULARITY_CACHE index:[/]\n{e}\n") + if stderr.is_interactive and rich.prompt.Confirm.ask(f"[blue]Specify a new index file and try again?"): + self.container_cache_index = None # reset chosen path to index file. + self.prompt_singularity_cachedir_remote() + else: + log.info("Proceeding without consideration of the remote $NXF_SINGULARITY_CACHE index.") + self.container_cache_index = None + if os.environ.get("NXF_SINGULARITY_CACHEDIR"): + self.container_cache_utilisation = "copy" # default to copy if possible, otherwise skip. + else: + self.container_cache_utilisation = None def prompt_compression_type(self): """Ask user if we should compress the downloaded files""" @@ -324,7 +568,7 @@ def prompt_compression_type(self): stderr.print( "\nIf transferring the downloaded files to another system, it can be convenient to have everything compressed in a single file." 
) - if self.container == "singularity": + if self.container_system == "singularity": stderr.print( "[bold]This is [italic]not[/] recommended when downloading Singularity images, as it can take a long time and saves very little space." ) @@ -343,24 +587,32 @@ def prompt_compression_type(self): if self.compress_type == "none": self.compress_type = None - def download_wf_files(self): + def download_wf_files(self, revision, wf_sha, download_url): """Downloads workflow files from GitHub to the :attr:`self.outdir`.""" - log.debug(f"Downloading {self.wf_download_url}") + log.debug(f"Downloading {download_url}") # Download GitHub zip file into memory and extract - url = requests.get(self.wf_download_url) + url = requests.get(download_url) with ZipFile(io.BytesIO(url.content)) as zipfile: zipfile.extractall(self.outdir) + # create a filesystem-safe version of the revision name for the directory + revision_dirname = re.sub("[^0-9a-zA-Z]+", "_", revision) + # account for name collisions, if there is a branch / release named "configs" or "singularity-images" + if revision_dirname in ["configs", "singularity-images"]: + revision_dirname = re.sub("[^0-9a-zA-Z]+", "_", self.pipeline + revision_dirname) + # Rename the internal directory name to be more friendly - gh_name = f"{self.pipeline}-{self.wf_sha}".split("/")[-1] - os.rename(os.path.join(self.outdir, gh_name), os.path.join(self.outdir, "workflow")) + gh_name = f"{self.pipeline}-{wf_sha if bool(wf_sha) else ''}".split("/")[-1] + os.rename(os.path.join(self.outdir, gh_name), os.path.join(self.outdir, revision_dirname)) # Make downloaded files executable - for dirpath, _, filelist in os.walk(os.path.join(self.outdir, "workflow")): + for dirpath, _, filelist in os.walk(os.path.join(self.outdir, revision_dirname)): for fname in filelist: os.chmod(os.path.join(dirpath, fname), 0o775) + return revision_dirname + def download_configs(self): """Downloads the centralised config profiles from nf-core/configs to :attr:`self.outdir`.""" 
configs_zip_url = "https://github.com/nf-core/configs/archive/master.zip" @@ -380,9 +632,9 @@ def download_configs(self): for fname in filelist: os.chmod(os.path.join(dirpath, fname), 0o775) - def wf_use_local_configs(self): + def wf_use_local_configs(self, revision_dirname): """Edit the downloaded nextflow.config file to use the local config files""" - nfconfig_fn = os.path.join(self.outdir, "workflow", "nextflow.config") + nfconfig_fn = os.path.join(self.outdir, revision_dirname, "nextflow.config") find_str = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" repl_str = "${projectDir}/../configs/" log.debug(f"Editing 'params.custom_config_base' in '{nfconfig_fn}'") @@ -396,7 +648,7 @@ def wf_use_local_configs(self): nfconfig = nfconfig.replace(find_str, repl_str) # Append the singularity.cacheDir to the end if we need it - if self.container == "singularity" and not self.singularity_cache_only: + if self.container_system == "singularity" and self.container_cache_utilisation == "copy": nfconfig += ( f"\n\n// Added by `nf-core download` v{nf_core.__version__} //\n" + 'singularity.cacheDir = "${projectDir}/../singularity-images/"' @@ -408,17 +660,100 @@ def wf_use_local_configs(self): with open(nfconfig_fn, "w") as nfconfig_fh: nfconfig_fh.write(nfconfig) - def find_container_images(self): + def find_container_images(self, workflow_directory): """Find container image names for workflow. Starts by using `nextflow config` to pull out any process.container - declarations. This works for DSL1. It should return a simple string with resolved logic. + declarations. This works for DSL1. It should return a simple string with resolved logic, + but not always, e.g. not for differentialabundance 1.2.0 Second, we look for DSL2 containers. These can't be found with `nextflow config` at the time of writing, so we scrape the pipeline files. - This returns raw source code that will likely need to be cleaned. 
+ This returns raw matches that will likely need to be cleaned. + """ + + log.debug("Fetching container names for workflow") + # since this is run for multiple revisions now, account for previously detected containers. + previous_findings = [] if not self.containers else self.containers + config_findings = [] + module_findings = [] + + # Use linting code to parse the pipeline nextflow config + self.nf_config = nf_core.utils.fetch_wf_config(workflow_directory) - If multiple containers are found, prioritise any prefixed with http for direct download. + # Find any config variables that look like a container + for k, v in self.nf_config.items(): + if (k.startswith("process.") or k.startswith("params.")) and k.endswith(".container"): + """ + Can be plain string / Docker URI or DSL2 syntax + + Since raw parsing is done by Nextflow, single quotes will be (partially) escaped in DSL2. + Use cleaning regex on DSL2. Same as for modules, except that (?(?(?:.(?!(?[\'\"]) The quote character is captured into the quote group \1. + The pattern (?:.(?!\1))*.? is used to match any character (.) not followed by the closing quote character (?!\1). + This capture happens greedy *, but we add a .? to ensure that we don't match the whole file until the last occurrence + of the closing quote character, but rather stop at the first occurrence. \1 inserts the matched quote character into the regex, either " or '. + It may be followed by whitespace or closing bracket [\s}]* + re.DOTALL is used to account for the string to be spread out across multiple lines. + """ + container_regex = re.compile( + r"container\s+[\s{}=$]*(?P[\'\"])(?P(?:.(?!\1))*.?)\1[\s}]*", re.DOTALL + ) + + local_module_findings = re.findall(container_regex, search_space) + + # finding fill always be a tuple of length 2, first the quote used and second the enquoted value. 
+ for finding in local_module_findings: + # append finding since we want to collect them from all modules + # also append search_space because we need to start over later if nothing was found. + module_findings.append((finding + (search_space, file_path))) + + # Not sure if there will ever be multiple container definitions per module, but beware DSL3. + # Like above run on shallow copy, because length may change at runtime. + module_findings = self.rectify_raw_container_matches(module_findings[:]) + + # Remove duplicates and sort + self.containers = sorted(list(set(previous_findings + config_findings + module_findings))) + + def rectify_raw_container_matches(self, raw_findings): + """Helper function to rectify the raw extracted container matches into fully qualified container names. + If multiple containers are found, any prefixed with http for direct download is prioritized Example syntax: @@ -434,71 +769,165 @@ def find_container_images(self): 'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' : 'biocontainers/fastqc:0.11.9--0' }" + Later DSL2, variable is being used: + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ "https://depot.galaxyproject.org/singularity/${container_id}" : + "quay.io/biocontainers/${container_id}" }" + + container_id = 'mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0' + DSL1 / Special case DSL2: container "nfcore/cellranger:6.0.2" """ + cleaned_matches = [] - log.debug("Fetching container names for workflow") - containers_raw = [] + for _, container_value, search_space, file_path in raw_findings: + """ + Now we need to isolate all container paths (typically quoted strings) from the raw container_value - # Use linting code to parse the pipeline nextflow config - self.nf_config = nf_core.utils.fetch_wf_config(os.path.join(self.outdir, "workflow")) + For example from: - # Find any config variables that look like a container - for k, v in self.nf_config.items(): - if k.startswith("process.") and k.endswith(".container"): - containers_raw.append(v.strip('"').strip("'")) + "${ workflow.containerEngine == \'singularity\' && !task.ext.singularity_pull_docker_container ? + \'https://depot.galaxyproject.org/singularity/ubuntu:20.04\' : + \'nf-core/ubuntu:20.04\' }" - # Recursive search through any DSL2 module files for container spec lines. - for subdir, _, files in os.walk(os.path.join(self.outdir, "workflow", "modules")): - for file in files: - if file.endswith(".nf"): - file_path = os.path.join(subdir, file) - with open(file_path, "r") as fh: - # Look for any lines with `container = "xxx"` - this_container = None - contents = fh.read() - matches = re.findall(r"container\s*\"([^\"]*)\"", contents, re.S) - if matches: - for match in matches: - # Look for a http download URL. 
- # Thanks Stack Overflow for the regex: https://stackoverflow.com/a/3809435/713980 - url_regex = r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" - url_match = re.search(url_regex, match, re.S) - if url_match: - this_container = url_match.group(0) - break # Prioritise http, exit loop as soon as we find it - - # No https download, is the entire container string a docker URI? - else: - # Thanks Stack Overflow for the regex: https://stackoverflow.com/a/39672069/713980 - docker_regex = r"^(?:(?=[^:\/]{1,253})(?!-)[a-zA-Z0-9-]{1,63}(? 1 else ''}") + # first check if container_value it is a plain container URI like in DSL1 pipelines? + # Thanks Stack Overflow for the regex: https://stackoverflow.com/a/39672069/713980 + docker_regex = r"^(?:(?=[^:\/]{1,253})(?!-)[a-zA-Z0-9-]{1,63}(?(?(?:.(?!(?(?(?:.(?!(? 1 else ''} in total." + ) + with DownloadProgress() as progress: - task = progress.add_task("all_containers", total=len(self.containers), progress_type="summary") + task = progress.add_task( + "Collecting container images", total=len(self.containers), progress_type="summary" + ) # Organise containers based on what we need to do with them containers_exist = [] @@ -520,8 +949,8 @@ def get_singularity_images(self): log.debug(f"Cache directory not found, creating: {cache_path_dir}") os.makedirs(cache_path_dir) - # We already have the target file in place, return - if os.path.exists(out_path): + # We already have the target file in place or in remote cache, return + if os.path.exists(out_path) or os.path.basename(out_path) in self.containers_remote: containers_exist.append(container) continue @@ -539,70 +968,113 @@ def get_singularity_images(self): containers_pull.append([container, out_path, cache_path]) # Exit if we need to pull images and Singularity is not installed - if len(containers_pull) > 0 and shutil.which("singularity") is None: - raise OSError("Singularity is needed to pull images, but it is not installed") - 
- # Go through each method of fetching containers in order - for container in containers_exist: - progress.update(task, description="Image file exists") - progress.update(task, advance=1) - - for container in containers_cache: - progress.update(task, description="Copying singularity images from cache") - self.singularity_copy_cache_image(*container) - progress.update(task, advance=1) - - with concurrent.futures.ThreadPoolExecutor(max_workers=self.parallel_downloads) as pool: - progress.update(task, description="Downloading singularity images") - - # Kick off concurrent downloads - future_downloads = [ - pool.submit(self.singularity_download_image, *container, progress) - for container in containers_download - ] - - # Make ctrl-c work with multi-threading - self.kill_with_fire = False - - try: - # Iterate over each threaded download, waiting for them to finish - for future in concurrent.futures.as_completed(future_downloads): - future.result() + if len(containers_pull) > 0: + if not (shutil.which("singularity") or shutil.which("apptainer")): + raise OSError( + "Singularity/Apptainer is needed to pull images, but it is not installed or not in $PATH" + ) + + if containers_exist: + if self.container_cache_index is not None: + log.info( + f"{len(containers_exist)} containers are already cached remotely and won't be retrieved." + ) + # Go through each method of fetching containers in order + for container in containers_exist: + progress.update(task, description="Image file exists at destination") + progress.update(task, advance=1) + + if containers_cache: + for container in containers_cache: + progress.update(task, description="Copying singularity images from cache") + self.singularity_copy_cache_image(*container) + progress.update(task, advance=1) + + if containers_download or containers_pull: + # if clause gives slightly better UX, because Download is no longer displayed if nothing is left to be downloaded. 
+ with concurrent.futures.ThreadPoolExecutor(max_workers=self.parallel_downloads) as pool: + progress.update(task, description="Downloading singularity images") + + # Kick off concurrent downloads + future_downloads = [ + pool.submit(self.singularity_download_image, *container, progress) + for container in containers_download + ] + + # Make ctrl-c work with multi-threading + self.kill_with_fire = False + + try: + # Iterate over each threaded download, waiting for them to finish + for future in concurrent.futures.as_completed(future_downloads): + future.result() + try: + progress.update(task, advance=1) + except Exception as e: + log.error(f"Error updating progress bar: {e}") + + except KeyboardInterrupt: + # Cancel the future threads that haven't started yet + for future in future_downloads: + future.cancel() + # Set the variable that the threaded function looks for + # Will trigger an exception from each thread + self.kill_with_fire = True + # Re-raise exception on the main thread + raise + + for container in containers_pull: + progress.update(task, description="Pulling singularity images") + # it is possible to try multiple registries / mirrors if multiple were specified. + # Iteration happens over a copy of self.container_library[:], as I want to be able to remove failing registries for subsequent images. 
+ for library in self.container_library[:]: try: - progress.update(task, advance=1) - except Exception as e: - log.error(f"Error updating progress bar: {e}") - - except KeyboardInterrupt: - # Cancel the future threads that haven't started yet - for future in future_downloads: - future.cancel() - # Set the variable that the threaded function looks for - # Will trigger an exception from each thread - self.kill_with_fire = True - # Re-raise exception on the main thread - raise - - for container in containers_pull: - progress.update(task, description="Pulling singularity images") - try: - self.singularity_pull_image(*container, progress) - except RuntimeWarning as r: - # Raise exception if this is not possible - log.error("Not able to pull image. Service might be down or internet connection is dead.") - raise r - progress.update(task, advance=1) + self.singularity_pull_image(*container, library, progress) + # Pulling the image was successful, no ContainerError was raised, break the library loop + break + except ContainerError.ImageExists as e: + # Pulling not required + break + except ContainerError.RegistryNotFound as e: + self.container_library.remove(library) + # The only library was removed + if not self.container_library: + log.error(e.message) + log.error(e.helpmessage) + raise OSError from e + else: + # Other libraries can be used + continue + except ContainerError.ImageNotFound as e: + # Try other registries + continue + except ContainerError.InvalidTag as e: + # Try other registries + continue + except ContainerError.OtherError as e: + # Try other registries + log.error(e.message) + log.error(e.helpmessage) + continue + else: + # The else clause executes after the loop completes normally. + # This means the library loop completed without breaking, indicating failure for all libraries (registries) + log.error( + f"Not able to pull image of {container}. Service might be down or internet connection is dead." + ) + # Task should advance in any case. 
Failure to pull will not kill the download process. + progress.update(task, advance=1) def singularity_image_filenames(self, container): """Check Singularity cache for image, copy to destination folder if found. Args: - container (str): A pipeline's container name. Can be direct download URL - or a Docker Hub repository ID. + container (str): A pipeline's container name. Can be direct download URL + or a Docker Hub repository ID. Returns: - results (bool, str): Returns True if we have the image in the target location. - Returns a download path if not. + results (bool, str): Returns True if we have the image in the target location. + Returns a download path if not. """ # Generate file paths @@ -630,11 +1102,11 @@ def singularity_image_filenames(self, container): if os.environ.get("NXF_SINGULARITY_CACHEDIR"): cache_path = os.path.join(os.environ["NXF_SINGULARITY_CACHEDIR"], out_name) # Use only the cache - set this as the main output path - if self.singularity_cache_only: + if self.container_cache_utilisation == "amend": out_path = cache_path cache_path = None - elif self.singularity_cache_only: - raise FileNotFoundError("'--singularity-cache' specified but no '$NXF_SINGULARITY_CACHEDIR' set!") + elif self.container_cache_utilisation in ["amend", "copy"]: + raise FileNotFoundError("Singularity cache is required but no '$NXF_SINGULARITY_CACHEDIR' set!") return (out_path, cache_path) @@ -715,7 +1187,7 @@ def singularity_download_image(self, container, out_path, cache_path, progress): # Re-raise the caught exception raise - def singularity_pull_image(self, container, out_path, cache_path, progress): + def singularity_pull_image(self, container, out_path, cache_path, library, progress): """Pull a singularity image using ``singularity pull`` Attempt to use a local installation of singularity to pull the image. @@ -723,15 +1195,20 @@ def singularity_pull_image(self, container, out_path, cache_path, progress): Args: container (str): A pipeline's container name. 
Usually it is of similar format to ``nfcore/name:version``. + library (list of str): A list of libraries to try for pulling the image. Raises: Various exceptions possible from `subprocess` execution of Singularity. """ output_path = cache_path or out_path - # Pull using singularity - address = f"docker://{container.replace('docker://', '')}" - singularity_command = ["singularity", "pull", "--name", output_path, address] + address = f"docker://{library}/{container.replace('docker://', '')}" + if shutil.which("singularity"): + singularity_command = ["singularity", "pull", "--name", output_path, address] + elif shutil.which("apptainer"): + singularity_command = ["apptainer", "pull", "--name", output_path, address] + else: + raise OSError("Singularity/Apptainer is needed to pull images, but it is not installed or not in $PATH") log.debug(f"Building singularity image: {address}") log.debug(f"Singularity command: {' '.join(singularity_command)}") @@ -754,9 +1231,15 @@ def singularity_pull_image(self, container, out_path, cache_path, progress): if lines: # something went wrong with the container retrieval if any("FATAL: " in line for line in lines): - log.info("Singularity container retrieval fialed with the following error:") - log.info("".join(lines)) - raise FileNotFoundError(f'The container "{container}" is unavailable.\n{"".join(lines)}') + progress.remove_task(task) + raise ContainerError( + container=container, + registry=library, + address=address, + out_path=out_path if out_path else cache_path or "", + singularity_command=singularity_command, + error_msg=lines, + ) # Copy cached download if we are using the cache if cache_path: @@ -794,5 +1277,341 @@ def compress_download(self): log.debug(f"Deleting uncompressed files: '{self.outdir}'") shutil.rmtree(self.outdir) - # Caclualte md5sum for output file + # Calculate md5sum for output file log.info(f"MD5 checksum for '{self.output_filename}': [blue]{nf_core.utils.file_md5(self.output_filename)}[/]") + + +class 
WorkflowRepo(SyncedRepo): + """ + An object to store details about a locally cached workflow repository. + + Important Attributes: + fullname: The full name of the repository, ``nf-core/{self.pipelinename}``. + local_repo_dir (str): The local directory, where the workflow is cloned into. Defaults to ``$HOME/.cache/nf-core/nf-core/{self.pipeline}``. + + """ + + def __init__( + self, + remote_url, + revision, + commit, + location=None, + hide_progress=False, + in_cache=True, + ): + """ + Initializes the object and clones the workflows git repository if it is not already present + + Args: + remote_url (str): The URL of the remote repository. Defaults to None. + self.revision (list of str): The revisions to include. A list of strings. + commits (dict of str): The checksums to linked with the revisions. + no_pull (bool, optional): Whether to skip the pull step. Defaults to False. + hide_progress (bool, optional): Whether to hide the progress bar. Defaults to False. + in_cache (bool, optional): Whether to clone the repository from the cache. Defaults to False. 
+ """ + self.remote_url = remote_url + if isinstance(revision, str): + self.revision = [revision] + elif isinstance(revision, list): + self.revision = [*revision] + else: + self.revision = [] + if isinstance(commit, str): + self.commit = [commit] + elif isinstance(commit, list): + self.commit = [*commit] + else: + self.commit = [] + self.fullname = nf_core.modules.modules_utils.repo_full_name_from_remote(self.remote_url) + self.retries = 0 # retries for setting up the locally cached repository + self.hide_progress = hide_progress + + self.setup_local_repo(remote=remote_url, location=location, in_cache=in_cache) + + # expose some instance attributes + self.tags = self.repo.tags + + def __repr__(self): + """Called by print, creates representation of object""" + return f"" + + def access(self): + if os.path.exists(self.local_repo_dir): + return self.local_repo_dir + else: + return None + + def checkout(self, commit): + return super().checkout(commit) + + def get_remote_branches(self, remote_url): + return super().get_remote_branches(remote_url) + + def retry_setup_local_repo(self, skip_confirm=False): + self.retries += 1 + if skip_confirm or rich.prompt.Confirm.ask( + f"[violet]Delete local cache '{self.local_repo_dir}' and try again?" + ): + if ( + self.retries > 1 + ): # One unconfirmed retry is acceptable, but prevent infinite loops without user interaction. + raise DownloadError( + f"Errors with locally cached repository of '{self.fullname}'. Please delete '{self.local_repo_dir}' manually and try again." + ) + if not skip_confirm: # Feedback to user for manual confirmation. + log.info(f"Removing '{self.local_repo_dir}'") + shutil.rmtree(self.local_repo_dir) + self.setup_local_repo(self.remote_url, in_cache=False) + else: + raise DownloadError("Exiting due to error with locally cached Git repository.") + + def setup_local_repo(self, remote, location=None, in_cache=True): + """ + Sets up the local git repository. 
If the repository has been cloned previously, it + returns a git.Repo object of that clone. Otherwise it tries to clone the repository from + the provided remote URL and returns a git.Repo of the new clone. + + Args: + remote (str): git url of remote + location (Path): location where the clone should be created/cached. + in_cache (bool, optional): Whether to clone the repository from the cache. Defaults to False. + Sets self.repo + """ + if location: + self.local_repo_dir = os.path.join(location, self.fullname) + else: + self.local_repo_dir = os.path.join(NFCORE_DIR if not in_cache else NFCORE_CACHE_DIR, self.fullname) + + try: + if not os.path.exists(self.local_repo_dir): + try: + pbar = rich.progress.Progress( + "[bold blue]{task.description}", + rich.progress.BarColumn(bar_width=None), + "[bold yellow]{task.fields[state]}", + transient=True, + disable=os.environ.get("HIDE_PROGRESS", None) is not None or self.hide_progress, + ) + with pbar: + self.repo = git.Repo.clone_from( + remote, + self.local_repo_dir, + progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Cloning"), + ) + super().update_local_repo_status(self.fullname, True) + except GitCommandError: + raise DownloadError(f"Failed to clone from the remote: `{remote}`") + else: + self.repo = git.Repo(self.local_repo_dir) + + if super().no_pull_global: + super().update_local_repo_status(self.fullname, True) + # If the repo is already cloned, fetch the latest changes from the remote + if not super().local_repo_synced(self.fullname): + pbar = rich.progress.Progress( + "[bold blue]{task.description}", + rich.progress.BarColumn(bar_width=None), + "[bold yellow]{task.fields[state]}", + transient=True, + disable=os.environ.get("HIDE_PROGRESS", None) is not None or self.hide_progress, + ) + with pbar: + self.repo.remotes.origin.fetch( + progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Pulling") + ) + super().update_local_repo_status(self.fullname, True) + + except (GitCommandError, 
InvalidGitRepositoryError) as e: + log.error(f"[red]Could not set up local cache of modules repository:[/]\n{e}\n") + self.retry_setup_local_repo() + + def tidy_tags_and_branches(self): + """ + Function to delete all tags and branches that are not of interest to the downloader. + This allows a clutter-free experience in Tower. The untagged commits are evidently still available. + + However, due to local caching, the downloader might also want access to revisions that had been deleted before. + In that case, don't bother with re-adding the tags and rather download anew from Github. + """ + if self.revision and self.repo and self.repo.tags: + # create a set to keep track of the revisions to process & check + desired_revisions = set(self.revision) + + # determine what needs pruning + tags_to_remove = {tag for tag in self.repo.tags if tag.name not in desired_revisions.union({"latest"})} + heads_to_remove = {head for head in self.repo.heads if head.name not in desired_revisions.union({"latest"})} + + try: + # delete unwanted tags from repository + for tag in tags_to_remove: + self.repo.delete_tag(tag) + self.tags = self.repo.tags + + # switch to a revision that should be kept, because deleting heads fails, if they are checked out (e.g. "master") + self.checkout(self.revision[0]) + + # delete unwanted heads/branches from repository + for head in heads_to_remove: + self.repo.delete_head(head) + + # ensure all desired revisions/branches are available + for revision in desired_revisions: + if not self.repo.is_valid_object(revision): + self.checkout(revision) + self.repo.create_head(revision, revision) + if self.repo.head.is_detached: + self.repo.head.reset(index=True, working_tree=True) + + # no branch exists, but one is required for Tower's UI to display revisions correctly). Thus, "latest" will be created. 
+ if not bool(self.repo.heads): + if self.repo.is_valid_object("latest"): + # "latest" exists as tag but not as branch + self.repo.create_head("latest", "latest") # create a new head for latest + self.checkout("latest") + else: + # desired revisions may contain arbitrary branch names that do not correspond to valid sematic versioning patterns. + valid_versions = [ + VersionParser(v) + for v in desired_revisions + if re.match(r"\d+\.\d+(?:\.\d+)*(?:[\w\-_])*", v) + ] + # valid versions sorted in ascending order, last will be aliased as "latest". + latest = sorted(valid_versions)[-1] + self.repo.create_head("latest", latest) + self.checkout(latest) + if self.repo.head.is_detached: + self.repo.head.reset(index=True, working_tree=True) + + self.heads = self.repo.heads + + # get all tags and available remote_branches + completed_revisions = {revision.name for revision in self.repo.heads + self.repo.tags} + + # verify that all requested revisions are available. + # a local cache might lack revisions that were deleted during a less comprehensive previous download. + if bool(desired_revisions - completed_revisions): + log.info( + f"Locally cached version of the pipeline lacks selected revisions {', '.join(desired_revisions - completed_revisions)}. Downloading anew from GitHub..." 
+ ) + self.retry_setup_local_repo(skip_confirm=True) + self.tidy_tags_and_branches() + except (GitCommandError, InvalidGitRepositoryError) as e: + log.error(f"[red]Adapting your pipeline download unfortunately failed:[/]\n{e}\n") + self.retry_setup_local_repo(skip_confirm=True) + raise DownloadError(e) from e + + def bare_clone(self, destination): + if self.repo: + try: + destfolder = os.path.abspath(destination) + if not os.path.exists(destfolder): + os.makedirs(destfolder) + if os.path.exists(destination): + shutil.rmtree(os.path.abspath(destination)) + self.repo.clone(os.path.abspath(destination), bare=True) + except (OSError, GitCommandError, InvalidGitRepositoryError) as e: + log.error(f"[red]Failure to create the pipeline download[/]\n{e}\n") + + +# Distinct errors for the container download, required for acting on the exceptions + + +class ContainerError(Exception): + """A class of errors related to pulling containers with Singularity/Apptainer""" + + def __init__(self, container, registry, address, out_path, singularity_command, error_msg): + self.container = container + self.registry = registry + self.address = address + self.out_path = out_path + self.singularity_command = singularity_command + self.error_msg = error_msg + + for line in error_msg: + if re.search(r"dial\stcp.*no\ssuch\shost", line): + self.error_type = self.RegistryNotFound(self) + break + elif ( + re.search(r"requested\saccess\sto\sthe\sresource\sis\sdenied", line) + or re.search(r"StatusCode:\s404", line) + or re.search(r"invalid\sstatus\scode\sfrom\sregistry\s400", line) + ): + # Unfortunately, every registry seems to return an individual error here: + # Docker.io: denied: requested access to the resource is denied + # unauthorized: authentication required + # Quay.io: StatusCode: 404, \n'] + # ghcr.io: Requesting bearer token: invalid status code from registry 400 (Bad Request) + self.error_type = self.ImageNotFound(self) + break + elif re.search(r"manifest\sunknown", line): + 
self.error_type = self.InvalidTag(self) + break + elif re.search(r"Image\sfile\salready\sexists", line): + self.error_type = self.ImageExists(self) + break + else: + continue + else: + self.error_type = self.OtherError(self) + + log.error(self.error_type.message) + log.info(self.error_type.helpmessage) + log.debug(f'Failed command:\n{" ".join(singularity_command)}') + log.debug(f'Singularity error messages:\n{"".join(error_msg)}') + + raise self.error_type + + class RegistryNotFound(ConnectionRefusedError): + """The specified registry does not resolve to a valid IP address""" + + def __init__(self, error_log): + self.error_log = error_log + self.message = ( + f'[bold red]The specified container library "{self.error_log.registry}" is invalid or unreachable.[/]\n' + ) + self.helpmessage = ( + f'Please check, if you made a typo when providing "-l / --library {self.error_log.registry}"\n' + ) + super().__init__(self.message, self.helpmessage, self.error_log) + + class ImageNotFound(FileNotFoundError): + """The image can not be found in the registry""" + + def __init__(self, error_log): + self.error_log = error_log + self.message = ( + f'[bold red]"Pulling "{self.error_log.container}" from "{self.error_log.address}" failed.[/]\n' + ) + self.helpmessage = f'Saving image of "{self.error_log.container}" failed.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.f\n' + super().__init__(self.message) + + class InvalidTag(AttributeError): + """Image and registry are valid, but the (version) tag is not""" + + def __init__(self, error_log): + self.error_log = error_log + self.message = f'[bold red]"{self.error_log.address.split(":")[-1]}" is not a valid tag of "{self.error_log.container}"[/]\n' + self.helpmessage = f'Please chose a different library than {self.error_log.registry}\nor try to locate the "{self.error_log.address.split(":")[-1]}" version of "{self.error_log.container}" manually.\nPlease troubleshoot the command \n"{" 
".join(self.error_log.singularity_command)}" manually.\n' + super().__init__(self.message) + + class ImageExists(FileExistsError): + """Image already exists in cache/output directory.""" + + def __init__(self, error_log): + self.error_log = error_log + self.message = ( + f'[bold red]"{self.error_log.container}" already exists at destination and cannot be pulled[/]\n' + ) + self.helpmessage = f'Saving image of "{self.error_log.container}" failed, because "{self.error_log.out_path}" exists.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.\n' + super().__init__(self.message) + + class OtherError(RuntimeError): + """Undefined error with the container""" + + def __init__(self, error_log): + self.error_log = error_log + self.message = f'[bold red]"{self.error_log.container}" failed for unclear reasons.[/]\n' + self.helpmessage = f'Pulling of "{self.error_log.container}" failed.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.\n' + super().__init__(self.message, self.helpmessage, self.error_log) diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 417208a20d..47b27bb514 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -3,6 +3,7 @@ FROM gitpod/workspace-base USER root # Install util tools. 
+# software-properties-common is needed to add ppa support for Apptainer installation RUN apt-get update --quiet && \ apt-get install --quiet --yes \ apt-transport-https \ @@ -13,7 +14,13 @@ RUN apt-get update --quiet && \ wget \ curl \ tree \ - graphviz + graphviz \ + software-properties-common + +# Install Apptainer (Singularity) +RUN add-apt-repository -y ppa:apptainer/ppa && \ + apt-get update --quiet && \ + apt install -y apptainer # Install Conda RUN wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ @@ -38,13 +45,17 @@ RUN conda config --add channels defaults && \ conda config --set channel_priority strict && \ conda install --quiet --yes --name base mamba && \ mamba install --quiet --yes --name base \ - nextflow=22.10.1 \ - nf-core \ - nf-test \ - black \ - prettier \ - pytest-workflow && \ + nextflow \ + nf-core \ + nf-test \ + black \ + prettier \ + pre-commit \ + pytest-workflow && \ mamba clean --all -f -y +# Update Nextflow +RUN nextflow self-update + # Install nf-core RUN python -m pip install . 
diff --git a/nf_core/lint/files_exist.py b/nf_core/lint/files_exist.py index eb8c04916a..02baae7db8 100644 --- a/nf_core/lint/files_exist.py +++ b/nf_core/lint/files_exist.py @@ -52,7 +52,6 @@ def files_exist(self): docs/README.md docs/usage.md lib/nfcore_external_java_deps.jar - lib/NfcoreSchema.groovy lib/NfcoreTemplate.groovy lib/Utils.groovy lib/WorkflowMain.groovy @@ -161,7 +160,6 @@ def files_exist(self): [os.path.join("docs", "README.md")], [os.path.join("docs", "usage.md")], [os.path.join("lib", "nfcore_external_java_deps.jar")], - [os.path.join("lib", "NfcoreSchema.groovy")], [os.path.join("lib", "NfcoreTemplate.groovy")], [os.path.join("lib", "Utils.groovy")], [os.path.join("lib", "WorkflowMain.groovy")], diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index c0be64d0d7..2b64d62638 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -40,7 +40,6 @@ def files_unchanged(self): docs/images/nf-core-PIPELINE_logo_dark.png docs/README.md' lib/nfcore_external_java_deps.jar - lib/NfcoreSchema.groovy lib/NfcoreTemplate.groovy ['LICENSE', 'LICENSE.md', 'LICENCE', 'LICENCE.md'], # NB: British / American spelling @@ -105,7 +104,6 @@ def files_unchanged(self): [os.path.join("docs", "images", f"nf-core-{short_name}_logo_dark.png")], [os.path.join("docs", "README.md")], [os.path.join("lib", "nfcore_external_java_deps.jar")], - [os.path.join("lib", "NfcoreSchema.groovy")], [os.path.join("lib", "NfcoreTemplate.groovy")], ] files_partial = [ diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py index 3378efce5f..9eff60091f 100644 --- a/nf_core/lint/multiqc_config.py +++ b/nf_core/lint/multiqc_config.py @@ -71,12 +71,13 @@ def multiqc_config(self): if "report_comment" not in ignore_configs: # Check that the minimum plugins exist and are coming first in the summary try: + version = self.nf_config.get("manifest.version", "").strip(" '\"") if "report_comment" not in mqc_yml: raise 
AssertionError() if mqc_yml["report_comment"].strip() != ( - f'This report has been generated by the nf-core/{self.pipeline_name} analysis pipeline. For information about how to ' - f'interpret these results, please see the documentation.' ): raise AssertionError() diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index af018331f0..d22fa944ed 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -62,11 +62,11 @@ def nextflow_config(self): * Should always be set to default value: ``https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}`` - * ``params.show_hidden_params`` + * ``params.validationShowHiddenParams`` * Determines whether boilerplate params are showed by schema. Set to ``false`` by default - * ``params.schema_ignore_params`` + * ``params.validationSchemaIgnoreParams`` * A comma separated string of inputs the schema validation should ignore. @@ -130,8 +130,8 @@ def nextflow_config(self): ["process.time"], ["params.outdir"], ["params.input"], - ["params.show_hidden_params"], - ["params.schema_ignore_params"], + ["params.validationShowHiddenParams"], + ["params.validationSchemaIgnoreParams"], ] # Throw a warning if these are missing config_warn = [ diff --git a/nf_core/lint/readme.py b/nf_core/lint/readme.py index ae5c542837..55060442b1 100644 --- a/nf_core/lint/readme.py +++ b/nf_core/lint/readme.py @@ -36,7 +36,7 @@ def readme(self): if "nextflow_badge" not in ignore_configs: # Check that there is a readme badge showing the minimum required version of Nextflow - # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A522.10.1-23aa62.svg)](https://www.nextflow.io/) + # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.04.0-23aa62.svg)](https://www.nextflow.io/) # and that it has the correct version nf_badge_re = 
r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-!?(?:%E2%89%A5|%3E%3D)([\d\.]+)-23aa62\.svg\)\]\(https://www\.nextflow\.io/\)" match = re.search(nf_badge_re, content) diff --git a/nf_core/module-template/modules/main.nf b/nf_core/module-template/modules/main.nf index 83cdf90b92..404d38094d 100644 --- a/nf_core/module-template/modules/main.nf +++ b/nf_core/module-template/modules/main.nf @@ -94,4 +94,26 @@ process {{ component_name_underscore|upper }} { {{ tool }}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' )) END_VERSIONS """ + + stub: + def args = task.ext.args ?: '' + {% if has_meta -%} + def prefix = task.ext.prefix ?: "${meta.id}" + {%- endif %} + {% if not_empty_template -%} + // TODO nf-core: A stub section should mimic the execution of the original module as best as possible + // Have a look at the following examples: + // Simple example: https://github.com/nf-core/modules/blob/818474a292b4860ae8ff88e149fbcda68814114d/modules/nf-core/bcftools/annotate/main.nf#L47-L63 + // Complex example: https://github.com/nf-core/modules/blob/818474a292b4860ae8ff88e149fbcda68814114d/modules/nf-core/bedtools/split/main.nf#L38-L54 + {%- endif %} + """ + {% if not_empty_template -%} + touch ${prefix}.bam + {%- endif %} + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + {{ tool }}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' )) + END_VERSIONS + """ } diff --git a/nf_core/module-template/modules/meta.yml b/nf_core/module-template/modules/meta.yml index 2c8197dcba..aea3c36aa3 100644 --- a/nf_core/module-template/modules/meta.yml +++ b/nf_core/module-template/modules/meta.yml @@ -30,7 +30,7 @@ input: type: map description: | Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] + e.g. 
`[ id:'test', single_end:false ]` {% endif %} {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example input @@ -49,7 +49,7 @@ output: type: map description: | Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] + e.g. `[ id:'test', single_end:false ]` {% endif %} - versions: type: file diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 24d673b1c2..9d8fb177cd 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -69,6 +69,7 @@ def __init__( remote_url=None, branch=None, no_pull=False, + registry=None, hide_progress=False, ): super().__init__( @@ -114,6 +115,7 @@ def __init__( ) for m in self.get_local_components() ] + self.config = nf_core.utils.fetch_wf_config(self.dir, cache_config=True) else: module_dir = Path(self.dir, self.default_modules_path) self.all_remote_modules = [ @@ -124,6 +126,15 @@ def __init__( if not self.all_remote_modules: raise LookupError("No modules in 'modules' directory") + # This could be better, perhaps glob for all nextflow.config files in? 
+ self.config = nf_core.utils.fetch_wf_config(Path(self.dir).joinpath("tests", "config"), cache_config=True) + + if registry is None: + self.registry = self.config.get("docker.registry", "quay.io") + else: + self.registry = registry + log.debug(f"Registry set to {self.registry}") + self.lint_config = None self.modules_json = None @@ -145,6 +156,7 @@ def get_all_lint_tests(is_pipeline): def lint( self, module=None, + registry="quay.io", key=(), all_modules=False, print_results=True, @@ -227,11 +239,11 @@ def lint( # Lint local modules if local and len(local_modules) > 0: - self.lint_modules(local_modules, local=True, fix_version=fix_version) + self.lint_modules(local_modules, registry=registry, local=True, fix_version=fix_version) # Lint nf-core modules if len(remote_modules) > 0: - self.lint_modules(remote_modules, local=False, fix_version=fix_version) + self.lint_modules(remote_modules, registry=registry, local=False, fix_version=fix_version) if print_results: self._print_results(show_passed=show_passed, sort_by=sort_by) @@ -264,12 +276,13 @@ def filter_tests_by_key(self, key): # If -k supplied, only run these tests self.lint_tests = [k for k in self.lint_tests if k in key] - def lint_modules(self, modules, local=False, fix_version=False): + def lint_modules(self, modules, registry="quay.io", local=False, fix_version=False): """ Lint a list of modules Args: modules ([NFCoreModule]): A list of module objects + registry (str): The container registry to use. Should be quay.io in most situations. 
local (boolean): Whether the list consist of local or nf-core modules fix_version (boolean): Fix the module version if a newer version is available """ @@ -290,9 +303,9 @@ def lint_modules(self, modules, local=False, fix_version=False): for mod in modules: progress_bar.update(lint_progress, advance=1, test_name=mod.module_name) - self.lint_module(mod, progress_bar, local=local, fix_version=fix_version) + self.lint_module(mod, progress_bar, registry=registry, local=local, fix_version=fix_version) - def lint_module(self, mod, progress_bar, local=False, fix_version=False): + def lint_module(self, mod, progress_bar, registry, local=False, fix_version=False): """ Perform linting on one module @@ -311,7 +324,7 @@ def lint_module(self, mod, progress_bar, local=False, fix_version=False): # Only check the main script in case of a local module if local: - self.main_nf(mod, fix_version, progress_bar) + self.main_nf(mod, fix_version, self.registry, progress_bar) self.passed += [LintResult(mod, *m) for m in mod.passed] warned = [LintResult(mod, *m) for m in (mod.warned + mod.failed)] if not self.fail_warned: @@ -323,7 +336,7 @@ def lint_module(self, mod, progress_bar, local=False, fix_version=False): else: for test_name in self.lint_tests: if test_name == "main_nf": - getattr(self, test_name)(mod, fix_version, progress_bar) + getattr(self, test_name)(mod, fix_version, self.registry, progress_bar) else: getattr(self, test_name)(mod) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index bacc7d7fbd..18d95bd37e 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -17,7 +17,7 @@ log = logging.getLogger(__name__) -def main_nf(module_lint_object, module, fix_version, progress_bar): +def main_nf(module_lint_object, module, fix_version, registry, progress_bar): """ Lint a ``main.nf`` module file @@ -121,7 +121,7 @@ def main_nf(module_lint_object, module, fix_version, progress_bar): 
module.passed.append(("main_nf_script_outputs", "Process 'output' block found", module.main_nf)) # Check the process definitions - if check_process_section(module, process_lines, fix_version, progress_bar): + if check_process_section(module, process_lines, registry, fix_version, progress_bar): module.passed.append(("main_nf_container", "Container versions match", module.main_nf)) else: module.warned.append(("main_nf_container", "Container versions do not match", module.main_nf)) @@ -209,12 +209,20 @@ def check_when_section(self, lines): self.passed.append(("when_condition", "when: condition is unchanged", self.main_nf)) -def check_process_section(self, lines, fix_version, progress_bar): - """ - Lint the section of a module between the process definition +def check_process_section(self, lines, registry, fix_version, progress_bar): + """Lint the section of a module between the process definition and the 'input:' definition Specifically checks for correct software versions and containers + + Args: + lines (List[str]): Content of process. + registry (str): Base Docker registry for containers. Typically quay.io. + fix_version (bool): Fix software version + progress_bar (ProgressBar): Progress bar to update. + + Returns: + Optional[bool]: True if singularity and docker containers match, False otherwise. If process definition does not exist, None. 
""" # Check that we have a process section if len(lines) == 0: @@ -223,8 +231,8 @@ def check_process_section(self, lines, fix_version, progress_bar): self.passed.append(("process_exist", "Process definition exists", self.main_nf)) # Checks that build numbers of bioconda, singularity and docker container are matching - singularity_tag = "singularity" - docker_tag = "docker" + singularity_tag = None + docker_tag = None bioconda_packages = [] # Process name should be all capital letters @@ -240,7 +248,12 @@ def check_process_section(self, lines, fix_version, progress_bar): # Deprecated enable_conda for i, l in enumerate(lines): url = None - l = l.strip(" '\"") + l = l.strip(" \n'\"}:") + + # Catch preceeding "container " + if l.startswith("container"): + l = l.replace("container", "").strip(" \n'\"}:") + if _container_type(l) == "conda": bioconda_packages = [b for b in l.split() if "bioconda::" in b] match = re.search(r"params\.enable_conda", l) @@ -261,9 +274,10 @@ def check_process_section(self, lines, fix_version, progress_bar): ) ) if _container_type(l) == "singularity": - # e.g. "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' :" -> v1.2.0_cv1 - # e.g. "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' :" -> 0.11.9--0 - match = re.search(r"(?:/)?(?:biocontainers_)?(?::)?([A-Za-z\d\-_.]+?)(?:\.img)?'", l) + # e.g. "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img -> v1.2.0_cv1 + # e.g. 
"https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0 -> 0.11.9--0 + # Please god let's find a better way to do this than regex + match = re.search(r"(?:[:.])?([A-Za-z\d\-_.]+?)(?:\.img)?(?:\.sif)?$", l) if match is not None: singularity_tag = match.group(1) self.passed.append(("singularity_tag", f"Found singularity tag: {singularity_tag}", self.main_nf)) @@ -271,42 +285,56 @@ def check_process_section(self, lines, fix_version, progress_bar): self.failed.append(("singularity_tag", "Unable to parse singularity tag", self.main_nf)) singularity_tag = None url = urlparse(l.split("'")[0]) - # lint double quotes - if l.count('"') > 2: - self.failed.append( - ( - "container_links", - "Too many double quotes found when specifying singularity container", - self.main_nf, - ) - ) + if _container_type(l) == "docker": - # e.g. "quay.io/biocontainers/krona:2.7.1--pl526_5' }" -> 2.7.1--pl526_5 - # e.g. "biocontainers/biocontainers:v1.2.0_cv1' }" -> v1.2.0_cv1 - match = re.search(r"(?:[/])?(?::)?([A-Za-z\d\-_.]+)'", l) + # e.g. "quay.io/biocontainers/krona:2.7.1--pl526_5 -> 2.7.1--pl526_5 + # e.g. 
"biocontainers/biocontainers:v1.2.0_cv1 -> v1.2.0_cv1 + match = re.search(r":([A-Za-z\d\-_.]+)$", l) if match is not None: docker_tag = match.group(1) self.passed.append(("docker_tag", f"Found docker tag: {docker_tag}", self.main_nf)) else: self.failed.append(("docker_tag", "Unable to parse docker tag", self.main_nf)) docker_tag = None - if l.startswith("biocontainers/"): - # When we think it is a biocontainer, assume we are querying quay.io/biocontainers and insert quay.io as prefix - l = "quay.io/" + l - url = urlparse(l.split("'")[0]) - # lint double quotes - if l.count('"') > 2: + if l.startswith(registry): + l_stripped = re.sub(r"\W+$", "", l) self.failed.append( - ("container_links", "Too many double quotes found when specifying docker container", self.main_nf) + ( + "container_links", + f"{l_stripped} container name found, please use just 'organisation/container:tag' instead.", + self.main_nf, + ) ) + else: + self.passed.append(("container_links", f"Container prefix is correct", self.main_nf)) + + # Guess if container name is simple one (e.g. 
nfcore/ubuntu:20.04) + # If so, add quay.io as default container prefix + if l.count("/") == 1 and l.count(":") == 1: + l = "/".join([registry, l]).replace("//", "/") + url = urlparse(l.split("'")[0]) + # lint double quotes - if l.startswith("container"): + if l.startswith("container") or _container_type(l) == "docker" or _container_type(l) == "singularity": if l.count('"') > 2: self.failed.append( - ("container_links", "Too many double quotes found when specifying containers", self.main_nf) + ( + "container_links", + f"Too many double quotes found when specifying container: {l.lstrip('container ')}", + self.main_nf, + ) ) + else: + self.passed.append( + ( + "container_links", + f"Correct number of double quotes found when specifying container: {l.lstrip('container ')}", + self.main_nf, + ) + ) + # lint more than one container in the same line - if ("https://containers" in l or "https://depot" in l) and ("biocontainers/" in l or "quay.io/" in l): + if ("https://containers" in l or "https://depot" in l) and ("biocontainers/" in l or l.startswith(registry)): self.warned.append( ( "container_links", @@ -331,8 +359,14 @@ def check_process_section(self, lines, fix_version, progress_bar): log.debug(f"Unable to connect to url '{urlunparse(url)}' due to error: {e}") self.failed.append(("container_links", "Unable to connect to container URL", self.main_nf)) continue - if response.status_code != 200: - self.failed.append(("container_links", "Unable to connect to container URL", self.main_nf)) + if not response.ok: + self.failed.append( + ( + "container_links", + f"Unable to connect to {response.url}, status code: {response.status_code}", + self.main_nf, + ) + ) # Check that all bioconda packages have build numbers # Also check for newer versions @@ -392,7 +426,11 @@ def check_process_section(self, lines, fix_version, progress_bar): else: self.passed.append(("bioconda_latest", f"Conda package is the latest available: `{bp}`", self.main_nf)) - return docker_tag == 
singularity_tag + # Check if a tag exists at all. If not, return None. + if singularity_tag is None or docker_tag is None: + return None + else: + return docker_tag == singularity_tag def check_process_labels(self, lines): @@ -571,7 +609,7 @@ def _container_type(line): """Returns the container type of a build.""" if line.startswith("conda"): return "conda" - if line.startswith("https://containers") or line.startswith("https://depot"): + if line.startswith("https://") or line.startswith("https://depot"): # Look for a http download URL. # Thanks Stack Overflow for the regex: https://stackoverflow.com/a/3809435/713980 url_regex = ( @@ -581,9 +619,5 @@ def _container_type(line): if url_match: return "singularity" return None - if ( - line.startswith("biocontainers/") - or line.startswith("quay.io/") - or (line.count("/") == 1 and line.count(":") == 1) - ): + if line.count("/") >= 1 and line.count(":") == 1 and line.count(" ") == 0 and "https://" not in line: return "docker" diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 5f77148867..152ed7b0c0 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -11,7 +11,8 @@ import nf_core.modules.modules_json import nf_core.modules.modules_utils -from nf_core.utils import NFCORE_DIR, load_tools_config +from nf_core.synced_repo import RemoteProgressbar, SyncedRepo +from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, load_tools_config log = logging.getLogger(__name__) @@ -21,44 +22,7 @@ NF_CORE_MODULES_DEFAULT_BRANCH = "master" -class RemoteProgressbar(git.RemoteProgress): - """ - An object to create a progressbar for when doing an operation with the remote. - Note that an initialized rich Progress (progress bar) object must be past - during initialization. 
- """ - - def __init__(self, progress_bar, repo_name, remote_url, operation): - """ - Initializes the object and adds a task to the progressbar passed as 'progress_bar' - - Args: - progress_bar (rich.progress.Progress): A rich progress bar object - repo_name (str): Name of the repository the operation is performed on - remote_url (str): Git URL of the repository the operation is performed on - operation (str): The operation performed on the repository, i.e. 'Pulling', 'Cloning' etc. - """ - super().__init__() - self.progress_bar = progress_bar - self.tid = self.progress_bar.add_task( - f"{operation} from [bold green]'{repo_name}'[/bold green] ([link={remote_url}]{remote_url}[/link])", - start=False, - state="Waiting for response", - ) - - def update(self, op_code, cur_count, max_count=None, message=""): - """ - Overrides git.RemoteProgress.update. - Called every time there is a change in the remote operation - """ - if not self.progress_bar.tasks[self.tid].started: - self.progress_bar.start_task(self.tid) - self.progress_bar.update( - self.tid, total=max_count, completed=cur_count, state=f"{cur_count / max_count * 100:.1f}%" - ) - - -class ModulesRepo: +class ModulesRepo(SyncedRepo): """ An object to store details about the repository being used for modules. 
@@ -73,45 +37,6 @@ class ModulesRepo: local_repo_statuses = {} no_pull_global = False - @staticmethod - def local_repo_synced(repo_name): - """ - Checks whether a local repo has been cloned/pull in the current session - """ - return ModulesRepo.local_repo_statuses.get(repo_name, False) - - @staticmethod - def update_local_repo_status(repo_name, up_to_date): - """ - Updates the clone/pull status of a local repo - """ - ModulesRepo.local_repo_statuses[repo_name] = up_to_date - - @staticmethod - def get_remote_branches(remote_url): - """ - Get all branches from a remote repository - - Args: - remote_url (str): The git url to the remote repository - - Returns: - (set[str]): All branches found in the remote - """ - try: - unparsed_branches = git.Git().ls_remote(remote_url) - except git.GitCommandError: - raise LookupError(f"Was unable to fetch branches from '{remote_url}'") - else: - branches = {} - for branch_info in unparsed_branches.split("\n"): - sha, name = branch_info.split("\t") - if name != "HEAD": - # The remote branches are shown as 'ref/head/branch' - branch_name = Path(name).stem - branches[sha] = branch_name - return set(branches.values()) - def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=False): """ Initializes the object and clones the git repository if it is not already present @@ -146,27 +71,7 @@ def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=Fa self.avail_module_names = None - def verify_sha(self, prompt, sha): - """ - Verify that 'sha' and 'prompt' arguments are not provided together. - Verify that the provided SHA exists in the repo. 
- - Arguments: - prompt (bool): prompt asking for SHA - sha (str): provided sha - """ - if prompt and sha is not None: - log.error("Cannot use '--sha' and '--prompt' at the same time!") - return False - - if sha: - if not self.sha_exists_on_branch(sha): - log.error(f"Commit SHA '{sha}' doesn't exist in '{self.remote_url}'") - return False - - return True - - def setup_local_repo(self, remote, branch, hide_progress=True): + def setup_local_repo(self, remote, branch, hide_progress=True, in_cache=False): """ Sets up the local git repository. If the repository has been cloned previously, it returns a git.Repo object of that clone. Otherwise it tries to clone the repository from @@ -177,7 +82,7 @@ def setup_local_repo(self, remote, branch, hide_progress=True): branch (str): name of branch to use Sets self.repo """ - self.local_repo_dir = os.path.join(NFCORE_DIR, self.fullname) + self.local_repo_dir = os.path.join(NFCORE_DIR if not in_cache else NFCORE_CACHE_DIR, self.fullname) try: if not os.path.exists(self.local_repo_dir): try: @@ -236,263 +141,3 @@ def setup_local_repo(self, remote, branch, hide_progress=True): self.setup_local_repo(remote, branch, hide_progress) else: raise LookupError("Exiting due to error with local modules git repo") - - def setup_branch(self, branch): - """ - Verify that we have a branch and otherwise use the default one. - The branch is then checked out to verify that it exists in the repo. 
- - Args: - branch (str): Name of branch - """ - if branch is None: - # Don't bother fetching default branch if we're using nf-core - if self.remote_url == NF_CORE_MODULES_REMOTE: - self.branch = "master" - else: - self.branch = self.get_default_branch() - else: - self.branch = branch - - # Verify that the branch exists by checking it out - self.branch_exists() - - def get_default_branch(self): - """ - Gets the default branch for the repo (the branch origin/HEAD is pointing to) - """ - origin_head = next(ref for ref in self.repo.refs if ref.name == "origin/HEAD") - _, branch = origin_head.ref.name.split("/") - return branch - - def branch_exists(self): - """ - Verifies that the branch exists in the repository by trying to check it out - """ - try: - self.checkout_branch() - except GitCommandError: - raise LookupError(f"Branch '{self.branch}' not found in '{self.remote_url}'") - - def verify_branch(self): - """ - Verifies the active branch conforms do the correct directory structure - """ - dir_names = os.listdir(self.local_repo_dir) - if "modules" not in dir_names: - err_str = f"Repository '{self.remote_url}' ({self.branch}) does not contain the 'modules/' directory" - if "software" in dir_names: - err_str += ( - ".\nAs of nf-core/tools version 2.0, the 'software/' directory should be renamed to 'modules/'" - ) - raise LookupError(err_str) - - def checkout_branch(self): - """ - Checks out the specified branch of the repository - """ - self.repo.git.checkout(self.branch) - - def checkout(self, commit): - """ - Checks out the repository at the requested commit - - Args: - commit (str): Git SHA of the commit - """ - self.repo.git.checkout(commit) - - def component_exists(self, component_name, component_type, checkout=True, commit=None): - """ - Check if a module/subworkflow exists in the branch of the repo - - Args: - component_name (str): The name of the module/subworkflow - - Returns: - (bool): Whether the module/subworkflow exists in this branch of the repository - 
""" - return component_name in self.get_avail_components(component_type, checkout=checkout, commit=commit) - - def get_component_dir(self, component_name, component_type): - """ - Returns the file path of a module/subworkflow directory in the repo. - Does not verify that the path exists. - Args: - component_name (str): The name of the module/subworkflow - - Returns: - component_path (str): The path of the module/subworkflow in the local copy of the repository - """ - if component_type == "modules": - return os.path.join(self.modules_dir, component_name) - elif component_type == "subworkflows": - return os.path.join(self.subworkflows_dir, component_name) - - def install_component(self, component_name, install_dir, commit, component_type): - """ - Install the module/subworkflow files into a pipeline at the given commit - - Args: - component_name (str): The name of the module/subworkflow - install_dir (str): The path where the module/subworkflow should be installed - commit (str): The git SHA for the version of the module/subworkflow to be installed - - Returns: - (bool): Whether the operation was successful or not - """ - # Check out the repository at the requested ref - try: - self.checkout(commit) - except git.GitCommandError: - return False - - # Check if the module/subworkflow exists in the branch - if not self.component_exists(component_name, component_type, checkout=False): - log.error( - f"The requested {component_type[:-1]} does not exists in the branch '{self.branch}' of {self.remote_url}'" - ) - return False - - # Copy the files from the repo to the install folder - shutil.copytree(self.get_component_dir(component_name, component_type), Path(install_dir, component_name)) - - # Switch back to the tip of the branch - self.checkout_branch() - return True - - def module_files_identical(self, module_name, base_path, commit): - """ - Checks whether the module files in a pipeline are identical to the ones in the remote - Args: - module_name (str): The name of the 
module - base_path (str): The path to the module in the pipeline - - Returns: - (bool): Whether the pipeline files are identical to the repo files - """ - if commit is None: - self.checkout_branch() - else: - self.checkout(commit) - module_files = ["main.nf", "meta.yml"] - files_identical = {file: True for file in module_files} - module_dir = self.get_component_dir(module_name, "modules") - for file in module_files: - try: - files_identical[file] = filecmp.cmp(os.path.join(module_dir, file), os.path.join(base_path, file)) - except FileNotFoundError: - log.debug(f"Could not open file: {os.path.join(module_dir, file)}") - continue - self.checkout_branch() - return files_identical - - def get_component_git_log(self, component_name, component_type, depth=None): - """ - Fetches the commit history the of requested module/subworkflow since a given date. The default value is - not arbitrary - it is the last time the structure of the nf-core/modules repository was had an - update breaking backwards compatibility. 
- Args: - component_name (str): Name of module/subworkflow - modules_repo (ModulesRepo): A ModulesRepo object configured for the repository in question - - Returns: - ( dict ): Iterator of commit SHAs and associated (truncated) message - """ - self.checkout_branch() - component_path = os.path.join(component_type, self.repo_path, component_name) - commits_new = self.repo.iter_commits(max_count=depth, paths=component_path) - commits_new = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_new - ] - commits_old = [] - if component_type == "modules": - # Grab commits also from previous modules structure - component_path = os.path.join("modules", component_name) - commits_old = self.repo.iter_commits(max_count=depth, paths=component_path) - commits_old = [ - {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_old - ] - commits = iter(commits_new + commits_old) - return commits - - def get_latest_component_version(self, component_name, component_type): - """ - Returns the latest commit in the repository - """ - return list(self.get_component_git_log(component_name, component_type, depth=1))[0]["git_sha"] - - def sha_exists_on_branch(self, sha): - """ - Verifies that a given commit sha exists on the branch - """ - self.checkout_branch() - return sha in (commit.hexsha for commit in self.repo.iter_commits()) - - def get_commit_info(self, sha): - """ - Fetches metadata about the commit (dates, message, etc.) 
- Args: - commit_sha (str): The SHA of the requested commit - Returns: - message (str): The commit message for the requested commit - date (str): The commit date for the requested commit - Raises: - LookupError: If the search for the commit fails - """ - self.checkout_branch() - for commit in self.repo.iter_commits(): - if commit.hexsha == sha: - message = commit.message.partition("\n")[0] - date_obj = commit.committed_datetime - date = str(date_obj.date()) - return message, date - raise LookupError(f"Commit '{sha}' not found in the '{self.remote_url}'") - - def get_avail_components(self, component_type, checkout=True, commit=None): - """ - Gets the names of the modules/subworkflows in the repository. They are detected by - checking which directories have a 'main.nf' file - - Returns: - ([ str ]): The module/subworkflow names - """ - if checkout: - self.checkout_branch() - if commit is not None: - self.checkout(commit) - # Get directory - if component_type == "modules": - directory = self.modules_dir - elif component_type == "subworkflows": - directory = self.subworkflows_dir - # Module/Subworkflow directories are characterized by having a 'main.nf' file - avail_component_names = [ - os.path.relpath(dirpath, start=directory) - for dirpath, _, file_names in os.walk(directory) - if "main.nf" in file_names - ] - return avail_component_names - - def get_meta_yml(self, component_type, module_name): - """ - Returns the contents of the 'meta.yml' file of a module - - Args: - module_name (str): The name of the module - - Returns: - (str): The contents of the file in text format - """ - self.checkout_branch() - if component_type == "modules": - path = Path(self.modules_dir, module_name, "meta.yml") - elif component_type == "subworkflows": - path = Path(self.subworkflows_dir, module_name, "meta.yml") - else: - raise ValueError(f"Invalid component type: {component_type}") - if not path.exists(): - return None - with open(path) as fh: - contents = fh.read() - return contents 
diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 9afdd2987b..ecdda0f86b 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -124,4 +124,3 @@ To get started: Devcontainer specs: - [DevContainer config](.devcontainer/devcontainer.json) -- [Dockerfile](.devcontainer/Dockerfile) diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml index f93cd55d59..063690f29f 100644 --- a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml @@ -42,7 +42,7 @@ body: attributes: label: System information description: | - * Nextflow version _(eg. 22.10.1)_ + * Nextflow version _(eg. 23.04.0)_ * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter, Charliecloud, or Apptainer)_ diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index 4942167b12..2f83a0962c 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Launch workflow via tower - uses: seqeralabs/action-tower-launch@v1 + uses: seqeralabs/action-tower-launch@v2 # TODO nf-core: You can customise AWS full pipeline tests as required # Add full size test data (but still relatively small datasets for few samples) # on the `test_full.config` test runs with only one set of parameters {%- raw %} @@ -22,13 +22,18 @@ jobs: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} + revision: ${{ github.sha }} workdir: s3://${{ 
secrets.AWS_S3_BUCKET }}{% endraw %}/work/{{ short_name }}/{% raw %}work-${{ github.sha }}{% endraw %} parameters: | { + "hook_url": "{% raw %}${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}{% endraw %}", "outdir": "s3://{% raw %}${{ secrets.AWS_S3_BUCKET }}{% endraw %}/{{ short_name }}/{% raw %}results-${{ github.sha }}{% endraw %}" } - profiles: test_full,aws_tower + profiles: test_full + - uses: actions/upload-artifact@v3 with: name: Tower debug log file - path: tower_action_*.log + path: | + tower_action_*.log + tower_action_*.json diff --git a/nf_core/pipeline-template/.github/workflows/awstest.yml b/nf_core/pipeline-template/.github/workflows/awstest.yml index 7f80cf1bb5..9a0bf4afbc 100644 --- a/nf_core/pipeline-template/.github/workflows/awstest.yml +++ b/nf_core/pipeline-template/.github/workflows/awstest.yml @@ -12,18 +12,22 @@ jobs: steps: # Launch workflow using Tower CLI tool action {%- raw %} - name: Launch workflow via tower - uses: seqeralabs/action-tower-launch@v1 + uses: seqeralabs/action-tower-launch@v2 with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} + revision: ${{ github.sha }} workdir: s3://${{ secrets.AWS_S3_BUCKET }}{% endraw %}/work/{{ short_name }}/{% raw %}work-${{ github.sha }}{% endraw %} parameters: | { "outdir": "s3://{% raw %}${{ secrets.AWS_S3_BUCKET }}{% endraw %}/{{ short_name }}/{% raw %}results-test-${{ github.sha }}{% endraw %}" } - profiles: test,aws_tower + profiles: test + - uses: actions/upload-artifact@v3 with: name: Tower debug log file - path: tower_action_*.log + path: | + tower_action_*.log + tower_action_*.json diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index bf3dc36bc5..521f3e664a 100644 --- a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -24,7 +24,7 @@ jobs: strategy: matrix: 
NXF_VER: - - "22.10.1" + - "23.04.0" - "latest-everything" steps: - name: Check out pipeline code diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index 85d95ecc8e..25488dcc08 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -1,4 +1,9 @@ image: nfcore/gitpod:latest +tasks: + - name: Update Nextflow and setup pre-commit + command: | + pre-commit install --install-hooks + nextflow self-update vscode: extensions: # based on nf-core.nf-core-extensionpack diff --git a/nf_core/pipeline-template/CITATIONS.md b/nf_core/pipeline-template/CITATIONS.md index 740b045103..ceaba0cb5f 100644 --- a/nf_core/pipeline-template/CITATIONS.md +++ b/nf_core/pipeline-template/CITATIONS.md @@ -12,7 +12,10 @@ - [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) + > Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. Available online https://www.bioinformatics.babraham.ac.uk/projects/fastqc/. + - [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) + > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. ## Software packaging/containerisation tools @@ -31,5 +34,8 @@ - [Docker](https://dl.acm.org/doi/10.5555/2600239.2600241) + > Merkel, D. (2014). Docker: lightweight linux containers for consistent development and deployment. Linux Journal, 2014(239), 2. doi: 10.5555/2600239.2600241. + - [Singularity](https://pubmed.ncbi.nlm.nih.gov/28494014/) + > Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS One. 2017 May 11;12(5):e0177459. doi: 10.1371/journal.pone.0177459. eCollection 2017. PubMed PMID: 28494014; PubMed Central PMCID: PMC5426675. 
diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index e2ca15a8e6..e66746dcd5 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -10,7 +10,7 @@ {%- if github_badges -%} [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A522.10.1-23aa62.svg)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.04.0-23aa62.svg)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -82,11 +82,11 @@ nextflow run {{ name }} \ {% if branded -%} -For more details, please refer to the [usage documentation](https://nf-co.re/{{ short_name }}/usage) and the [parameter documentation](https://nf-co.re/{{ short_name }}/parameters). +For more details and further functionality, please refer to the [usage documentation](https://nf-co.re/{{ short_name }}/usage) and the [parameter documentation](https://nf-co.re/{{ short_name }}/parameters). ## Pipeline output -To see the the results of a test run with a full size dataset refer to the [results](https://nf-co.re/{{ short_name }}/results) tab on the nf-core website pipeline page. +To see the results of an example test run with a full size dataset refer to the [results](https://nf-co.re/{{ short_name }}/results) tab on the nf-core website pipeline page. For more details about the output files and reports, please refer to the [output documentation](https://nf-co.re/{{ short_name }}/output). 
diff --git a/nf_core/pipeline-template/assets/methods_description_template.yml b/nf_core/pipeline-template/assets/methods_description_template.yml index b2dc0a99c1..ad505900a8 100644 --- a/nf_core/pipeline-template/assets/methods_description_template.yml +++ b/nf_core/pipeline-template/assets/methods_description_template.yml @@ -3,17 +3,21 @@ description: "Suggested text and references to use when describing pipeline usag section_name: "{{ name }} Methods Description" section_href: "https://github.com/{{ name }}" plot_type: "html" -## TODO nf-core: Update the HTML below to your prefered methods description, e.g. add publication citation for this pipeline +## TODO nf-core: Update the HTML below to your preferred methods description, e.g. add publication citation for this pipeline ## You inject any metadata in the Nextflow '${workflow}' object data: |

Methods

-

Data was processed using {{ name }} v${workflow.manifest.version} ${doi_text} of the nf-core collection of workflows (Ewels et al., 2020).

+

Data was processed using {{ name }} v${workflow.manifest.version} ${doi_text} of the nf-core collection of workflows (Ewels et al., 2020), utilising reproducible software environments from the Bioconda (Grüning et al., 2018) and Biocontainers (da Veiga Leprevost et al., 2017) projects.

The pipeline was executed with Nextflow v${workflow.nextflow.version} (Di Tommaso et al., 2017) with the following command:

${workflow.commandLine}
+

${tool_citations}

References

    -
  • Di Tommaso, P., Chatzou, M., Floden, E. W., Barja, P. P., Palumbo, E., & Notredame, C. (2017). Nextflow enables reproducible computational workflows. Nature Biotechnology, 35(4), 316-319. https://doi.org/10.1038/nbt.3820
  • -
  • Ewels, P. A., Peltzer, A., Fillinger, S., Patel, H., Alneberg, J., Wilm, A., Garcia, M. U., Di Tommaso, P., & Nahnsen, S. (2020). The nf-core framework for community-curated bioinformatics pipelines. Nature Biotechnology, 38(3), 276-278. https://doi.org/10.1038/s41587-020-0439-x
  • +
  • Di Tommaso, P., Chatzou, M., Floden, E. W., Barja, P. P., Palumbo, E., & Notredame, C. (2017). Nextflow enables reproducible computational workflows. Nature Biotechnology, 35(4), 316-319. doi: 10.1038/nbt.3820
  • +
  • Ewels, P. A., Peltzer, A., Fillinger, S., Patel, H., Alneberg, J., Wilm, A., Garcia, M. U., Di Tommaso, P., & Nahnsen, S. (2020). The nf-core framework for community-curated bioinformatics pipelines. Nature Biotechnology, 38(3), 276-278. doi: 10.1038/s41587-020-0439-x
  • +
  • Grüning, B., Dale, R., Sjödin, A., Chapman, B. A., Rowe, J., Tomkins-Tinch, C. H., Valieris, R., Köster, J., & Bioconda Team. (2018). Bioconda: sustainable and comprehensive software distribution for the life sciences. Nature Methods, 15(7), 475–476. doi: 10.1038/s41592-018-0046-7
  • +
  • da Veiga Leprevost, F., Grüning, B. A., Alves Aflitos, S., Röst, H. L., Uszkoreit, J., Barsnes, H., Vaudel, M., Moreno, P., Gatto, L., Weber, J., Bai, M., Jimenez, R. C., Sachsenberg, T., Pfeuffer, J., Vera Alvarez, R., Griss, J., Nesvizhskii, A. I., & Perez-Riverol, Y. (2017). BioContainers: an open-source and community-driven framework for software standardization. Bioinformatics (Oxford, England), 33(16), 2580–2582. doi: 10.1093/bioinformatics/btx192
  • + ${tool_bibliography}
Notes:
diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index 440b0b9a3a..570ed3d8e5 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -1,7 +1,7 @@ report_comment: > - This report has been generated by the {{ name }} + This report has been generated by the {{ name }} analysis pipeline.{% if branded %} For information about how to interpret these results, please see the - documentation.{% endif %} + documentation.{% endif %} report_section_order: "{{ name_noslash }}-methods-description": order: -1000 diff --git a/nf_core/pipeline-template/assets/slackreport.json b/nf_core/pipeline-template/assets/slackreport.json index 043d02f275..ec03b3968a 100644 --- a/nf_core/pipeline-template/assets/slackreport.json +++ b/nf_core/pipeline-template/assets/slackreport.json @@ -3,7 +3,7 @@ { "fallback": "Plain-text summary of the attachment.", "color": "<% if (success) { %>good<% } else { %>danger<%} %>", - "author_name": "sanger-tol/readmapping v${version} - ${runName}", + "author_name": "{{ name }} v${version} - ${runName}", "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", "fields": [ diff --git a/nf_core/pipeline-template/conf/test_full.config b/nf_core/pipeline-template/conf/test_full.config index 46b165a910..d92692fa94 100644 --- a/nf_core/pipeline-template/conf/test_full.config +++ b/nf_core/pipeline-template/conf/test_full.config @@ -10,8 +10,6 @@ ---------------------------------------------------------------------------------------- */ -cleanup = true - params { config_profile_name = 'Full test profile' config_profile_description = 'Full test dataset to check pipeline function' diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 73e1132541..e89c2a7332 
100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -61,7 +61,7 @@ An [example samplesheet](../assets/samplesheet.csv) has been provided with the p The typical command for running the pipeline is as follows: ```bash -nextflow run {{ name }} --input samplesheet.csv --outdir --genome GRCh37 -profile docker +nextflow run {{ name }} --input ./samplesheet.csv --outdir ./results --genome GRCh37 -profile docker ``` This will launch the pipeline with the `docker` configuration profile. See below for more information about profiles. @@ -80,7 +80,8 @@ If you wish to repeatedly use the same parameters for multiple runs, rather than Pipeline settings can be provided in a `yaml` or `json` file via `-params-file `. > ⚠️ Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). -> The above pipeline run specified with a params file in yaml format: + +The above pipeline run specified with a params file in yaml format: ```bash nextflow run {{ name }} -profile docker -params-file params.yaml @@ -92,7 +93,6 @@ with `params.yaml` containing: input: './samplesheet.csv' outdir: './results/' genome: 'GRCh37' -input: 'data' <...> ``` diff --git a/nf_core/pipeline-template/lib/NfcoreSchema.groovy b/nf_core/pipeline-template/lib/NfcoreSchema.groovy deleted file mode 100755 index 9b34804d6d..0000000000 --- a/nf_core/pipeline-template/lib/NfcoreSchema.groovy +++ /dev/null @@ -1,530 +0,0 @@ -// -// This file holds several functions used to perform JSON parameter validation, help and summary rendering for the nf-core pipeline template. 
-// - -import nextflow.Nextflow -import org.everit.json.schema.Schema -import org.everit.json.schema.loader.SchemaLoader -import org.everit.json.schema.ValidationException -import org.json.JSONObject -import org.json.JSONTokener -import org.json.JSONArray -import groovy.json.JsonSlurper -import groovy.json.JsonBuilder - -class NfcoreSchema { - - // - // Resolve Schema path relative to main workflow directory - // - public static String getSchemaPath(workflow, schema_filename='nextflow_schema.json') { - return "${workflow.projectDir}/${schema_filename}" - } - - // - // Function to loop over all parameters defined in schema and check - // whether the given parameters adhere to the specifications - // - /* groovylint-disable-next-line UnusedPrivateMethodParameter */ - public static void validateParameters(workflow, params, log, schema_filename='nextflow_schema.json') { - def has_error = false - //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~// - // Check for nextflow core params and unexpected params - def json = new File(getSchemaPath(workflow, schema_filename=schema_filename)).text - def Map schemaParams = (Map) new JsonSlurper().parseText(json).get('definitions') - def nf_params = [ - // Options for base `nextflow` command - 'bg', - 'c', - 'C', - 'config', - 'd', - 'D', - 'dockerize', - 'h', - 'log', - 'q', - 'quiet', - 'syslog', - 'v', - - // Options for `nextflow run` command - 'ansi', - 'ansi-log', - 'bg', - 'bucket-dir', - 'c', - 'cache', - 'config', - 'dsl2', - 'dump-channels', - 'dump-hashes', - 'E', - 'entry', - 'latest', - 'lib', - 'main-script', - 'N', - 'name', - 'offline', - 'params-file', - 'pi', - 'plugins', - 'poll-interval', - 'pool-size', - 'profile', - 'ps', - 'qs', - 'queue-size', - 'r', - 'resume', - 'revision', - 'stdin', - 'stub', - 'stub-run', - 'test', - 'w', - 'with-apptainer', - 'with-charliecloud', - 'with-conda', - 'with-dag', - 'with-docker', - 'with-mpi', - 'with-notification', - 'with-podman', - 'with-report', - 
'with-singularity', - 'with-timeline', - 'with-tower', - 'with-trace', - 'with-weblog', - 'without-docker', - 'without-podman', - 'work-dir' - ] - def unexpectedParams = [] - - // Collect expected parameters from the schema - def expectedParams = [] - def enums = [:] - for (group in schemaParams) { - for (p in group.value['properties']) { - expectedParams.push(p.key) - if (group.value['properties'][p.key].containsKey('enum')) { - enums[p.key] = group.value['properties'][p.key]['enum'] - } - } - } - - for (specifiedParam in params.keySet()) { - // nextflow params - if (nf_params.contains(specifiedParam)) { - log.error "ERROR: You used a core Nextflow option with two hyphens: '--${specifiedParam}'. Please resubmit with '-${specifiedParam}'" - has_error = true - } - // unexpected params - def params_ignore = params.schema_ignore_params.split(',') + 'schema_ignore_params' - def expectedParamsLowerCase = expectedParams.collect{ it.replace("-", "").toLowerCase() } - def specifiedParamLowerCase = specifiedParam.replace("-", "").toLowerCase() - def isCamelCaseBug = (specifiedParam.contains("-") && !expectedParams.contains(specifiedParam) && expectedParamsLowerCase.contains(specifiedParamLowerCase)) - if (!expectedParams.contains(specifiedParam) && !params_ignore.contains(specifiedParam) && !isCamelCaseBug) { - // Temporarily remove camelCase/camel-case params #1035 - def unexpectedParamsLowerCase = unexpectedParams.collect{ it.replace("-", "").toLowerCase()} - if (!unexpectedParamsLowerCase.contains(specifiedParamLowerCase)){ - unexpectedParams.push(specifiedParam) - } - } - } - - //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~// - // Validate parameters against the schema - InputStream input_stream = new File(getSchemaPath(workflow, schema_filename=schema_filename)).newInputStream() - JSONObject raw_schema = new JSONObject(new JSONTokener(input_stream)) - - // Remove anything that's in params.schema_ignore_params - raw_schema = 
removeIgnoredParams(raw_schema, params) - - Schema schema = SchemaLoader.load(raw_schema) - - // Clean the parameters - def cleanedParams = cleanParameters(params) - - // Convert to JSONObject - def jsonParams = new JsonBuilder(cleanedParams) - JSONObject params_json = new JSONObject(jsonParams.toString()) - - // Validate - try { - schema.validate(params_json) - } catch (ValidationException e) { - println '' - log.error 'ERROR: Validation of pipeline parameters failed!' - JSONObject exceptionJSON = e.toJSON() - printExceptions(exceptionJSON, params_json, log, enums) - println '' - has_error = true - } - - // Check for unexpected parameters - if (unexpectedParams.size() > 0) { - Map colors = NfcoreTemplate.logColours(params.monochrome_logs) - println '' - def warn_msg = 'Found unexpected parameters:' - for (unexpectedParam in unexpectedParams) { - warn_msg = warn_msg + "\n* --${unexpectedParam}: ${params[unexpectedParam].toString()}" - } - log.warn warn_msg - log.info "- ${colors.dim}Ignore this warning: params.schema_ignore_params = \"${unexpectedParams.join(',')}\" ${colors.reset}" - println '' - } - - if (has_error) { - Nextflow.error('Exiting!') - } - } - - // - // Beautify parameters for --help - // - public static String paramsHelp(workflow, params, command, schema_filename='nextflow_schema.json') { - Map colors = NfcoreTemplate.logColours(params.monochrome_logs) - Integer num_hidden = 0 - String output = '' - output += 'Typical pipeline command:\n\n' - output += " ${colors.cyan}${command}${colors.reset}\n\n" - Map params_map = paramsLoad(getSchemaPath(workflow, schema_filename=schema_filename)) - Integer max_chars = paramsMaxChars(params_map) + 1 - Integer desc_indent = max_chars + 14 - Integer dec_linewidth = 160 - desc_indent - for (group in params_map.keySet()) { - Integer num_params = 0 - String group_output = colors.underlined + colors.bold + group + colors.reset + '\n' - def group_params = params_map.get(group) // This gets the parameters of that 
particular group - for (param in group_params.keySet()) { - if (group_params.get(param).hidden && !params.show_hidden_params) { - num_hidden += 1 - continue; - } - def type = '[' + group_params.get(param).type + ']' - def description = group_params.get(param).description - def defaultValue = group_params.get(param).default != null ? " [default: " + group_params.get(param).default.toString() + "]" : '' - def description_default = description + colors.dim + defaultValue + colors.reset - // Wrap long description texts - // Loosely based on https://dzone.com/articles/groovy-plain-text-word-wrap - if (description_default.length() > dec_linewidth){ - List olines = [] - String oline = "" // " " * indent - description_default.split(" ").each() { wrd -> - if ((oline.size() + wrd.size()) <= dec_linewidth) { - oline += wrd + " " - } else { - olines += oline - oline = wrd + " " - } - } - olines += oline - description_default = olines.join("\n" + " " * desc_indent) - } - group_output += " --" + param.padRight(max_chars) + colors.dim + type.padRight(10) + colors.reset + description_default + '\n' - num_params += 1 - } - group_output += '\n' - if (num_params > 0){ - output += group_output - } - } - if (num_hidden > 0){ - output += colors.dim + "!! 
Hiding $num_hidden params, use --show_hidden_params to show them !!\n" + colors.reset - } - output += NfcoreTemplate.dashedLine(params.monochrome_logs) - return output - } - - // - // Groovy Map summarising parameters/workflow options used by the pipeline - // - public static LinkedHashMap paramsSummaryMap(workflow, params, schema_filename='nextflow_schema.json') { - // Get a selection of core Nextflow workflow options - def Map workflow_summary = [:] - if (workflow.revision) { - workflow_summary['revision'] = workflow.revision - } - workflow_summary['runName'] = workflow.runName - if (workflow.containerEngine) { - workflow_summary['containerEngine'] = workflow.containerEngine - } - if (workflow.container) { - workflow_summary['container'] = workflow.container - } - workflow_summary['launchDir'] = workflow.launchDir - workflow_summary['workDir'] = workflow.workDir - workflow_summary['projectDir'] = workflow.projectDir - workflow_summary['userName'] = workflow.userName - workflow_summary['profile'] = workflow.profile - workflow_summary['configFiles'] = workflow.configFiles.join(', ') - - // Get pipeline parameters defined in JSON Schema - def Map params_summary = [:] - def params_map = paramsLoad(getSchemaPath(workflow, schema_filename=schema_filename)) - for (group in params_map.keySet()) { - def sub_params = new LinkedHashMap() - def group_params = params_map.get(group) // This gets the parameters of that particular group - for (param in group_params.keySet()) { - if (params.containsKey(param)) { - def params_value = params.get(param) - def schema_value = group_params.get(param).default - def param_type = group_params.get(param).type - if (schema_value != null) { - if (param_type == 'string') { - if (schema_value.contains('$projectDir') || schema_value.contains('${projectDir}')) { - def sub_string = schema_value.replace('\$projectDir', '') - sub_string = sub_string.replace('\${projectDir}', '') - if (params_value.contains(sub_string)) { - schema_value = 
params_value - } - } - if (schema_value.contains('$params.outdir') || schema_value.contains('${params.outdir}')) { - def sub_string = schema_value.replace('\$params.outdir', '') - sub_string = sub_string.replace('\${params.outdir}', '') - if ("${params.outdir}${sub_string}" == params_value) { - schema_value = params_value - } - } - } - } - - // We have a default in the schema, and this isn't it - if (schema_value != null && params_value != schema_value) { - sub_params.put(param, params_value) - } - // No default in the schema, and this isn't empty - else if (schema_value == null && params_value != "" && params_value != null && params_value != false) { - sub_params.put(param, params_value) - } - } - } - params_summary.put(group, sub_params) - } - return [ 'Core Nextflow options' : workflow_summary ] << params_summary - } - - // - // Beautify parameters for summary and return as string - // - public static String paramsSummaryLog(workflow, params) { - Map colors = NfcoreTemplate.logColours(params.monochrome_logs) - String output = '' - def params_map = paramsSummaryMap(workflow, params) - def max_chars = paramsMaxChars(params_map) - for (group in params_map.keySet()) { - def group_params = params_map.get(group) // This gets the parameters of that particular group - if (group_params) { - output += colors.bold + group + colors.reset + '\n' - for (param in group_params.keySet()) { - output += " " + colors.blue + param.padRight(max_chars) + ": " + colors.green + group_params.get(param) + colors.reset + '\n' - } - output += '\n' - } - } - output += "!! 
Only displaying parameters that differ from the pipeline defaults !!\n" - output += NfcoreTemplate.dashedLine(params.monochrome_logs) - return output - } - - // - // Loop over nested exceptions and print the causingException - // - private static void printExceptions(ex_json, params_json, log, enums, limit=5) { - def causingExceptions = ex_json['causingExceptions'] - if (causingExceptions.length() == 0) { - def m = ex_json['message'] =~ /required key \[([^\]]+)\] not found/ - // Missing required param - if (m.matches()) { - log.error "* Missing required parameter: --${m[0][1]}" - } - // Other base-level error - else if (ex_json['pointerToViolation'] == '#') { - log.error "* ${ex_json['message']}" - } - // Error with specific param - else { - def param = ex_json['pointerToViolation'] - ~/^#\// - def param_val = params_json[param].toString() - if (enums.containsKey(param)) { - def error_msg = "* --${param}: '${param_val}' is not a valid choice (Available choices" - if (enums[param].size() > limit) { - log.error "${error_msg} (${limit} of ${enums[param].size()}): ${enums[param][0..limit-1].join(', ')}, ... 
)" - } else { - log.error "${error_msg}: ${enums[param].join(', ')})" - } - } else { - log.error "* --${param}: ${ex_json['message']} (${param_val})" - } - } - } - for (ex in causingExceptions) { - printExceptions(ex, params_json, log, enums) - } - } - - // - // Remove an element from a JSONArray - // - private static JSONArray removeElement(json_array, element) { - def list = [] - int len = json_array.length() - for (int i=0;i - if(raw_schema.keySet().contains('definitions')){ - raw_schema.definitions.each { definition -> - for (key in definition.keySet()){ - if (definition[key].get("properties").keySet().contains(ignore_param)){ - // Remove the param to ignore - definition[key].get("properties").remove(ignore_param) - // If the param was required, change this - if (definition[key].has("required")) { - def cleaned_required = removeElement(definition[key].required, ignore_param) - definition[key].put("required", cleaned_required) - } - } - } - } - } - if(raw_schema.keySet().contains('properties') && raw_schema.get('properties').keySet().contains(ignore_param)) { - raw_schema.get("properties").remove(ignore_param) - } - if(raw_schema.keySet().contains('required') && raw_schema.required.contains(ignore_param)) { - def cleaned_required = removeElement(raw_schema.required, ignore_param) - raw_schema.put("required", cleaned_required) - } - } - return raw_schema - } - - // - // Clean and check parameters relative to Nextflow native classes - // - private static Map cleanParameters(params) { - def new_params = params.getClass().newInstance(params) - for (p in params) { - // remove anything evaluating to false - if (!p['value']) { - new_params.remove(p.key) - } - // Cast MemoryUnit to String - if (p['value'].getClass() == nextflow.util.MemoryUnit) { - new_params.replace(p.key, p['value'].toString()) - } - // Cast Duration to String - if (p['value'].getClass() == nextflow.util.Duration) { - new_params.replace(p.key, p['value'].toString().replaceFirst(/d(?!\S)/, "day")) - } 
- // Cast LinkedHashMap to String - if (p['value'].getClass() == LinkedHashMap) { - new_params.replace(p.key, p['value'].toString()) - } - } - return new_params - } - - // - // This function tries to read a JSON params file - // - private static LinkedHashMap paramsLoad(String json_schema) { - def params_map = new LinkedHashMap() - try { - params_map = paramsRead(json_schema) - } catch (Exception e) { - println "Could not read parameters settings from JSON. $e" - params_map = new LinkedHashMap() - } - return params_map - } - - // - // Method to actually read in JSON file using Groovy. - // Group (as Key), values are all parameters - // - Parameter1 as Key, Description as Value - // - Parameter2 as Key, Description as Value - // .... - // Group - // - - private static LinkedHashMap paramsRead(String json_schema) throws Exception { - def json = new File(json_schema).text - def Map schema_definitions = (Map) new JsonSlurper().parseText(json).get('definitions') - def Map schema_properties = (Map) new JsonSlurper().parseText(json).get('properties') - /* Tree looks like this in nf-core schema - * definitions <- this is what the first get('definitions') gets us - group 1 - title - description - properties - parameter 1 - type - description - parameter 2 - type - description - group 2 - title - description - properties - parameter 1 - type - description - * properties <- parameters can also be ungrouped, outside of definitions - parameter 1 - type - description - */ - - // Grouped params - def params_map = new LinkedHashMap() - schema_definitions.each { key, val -> - def Map group = schema_definitions."$key".properties // Gets the property object of the group - def title = schema_definitions."$key".title - def sub_params = new LinkedHashMap() - group.each { innerkey, value -> - sub_params.put(innerkey, value) - } - params_map.put(title, sub_params) - } - - // Ungrouped params - def ungrouped_params = new LinkedHashMap() - schema_properties.each { innerkey, value -> - 
ungrouped_params.put(innerkey, value) - } - params_map.put("Other parameters", ungrouped_params) - - return params_map - } - - // - // Get maximum number of characters across all parameter names - // - private static Integer paramsMaxChars(params_map) { - Integer max_chars = 0 - for (group in params_map.keySet()) { - def group_params = params_map.get(group) // This gets the parameters of that particular group - for (param in group_params.keySet()) { - if (param.size() > max_chars) { - max_chars = param.size() - } - } - } - return max_chars - } -} diff --git a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy index 2cb8b41388..a1a726d69f 100755 --- a/nf_core/pipeline-template/lib/NfcoreTemplate.groovy +++ b/nf_core/pipeline-template/lib/NfcoreTemplate.groovy @@ -128,7 +128,7 @@ class NfcoreTemplate { def email_html = html_template.toString() // Render the sendmail template - def max_multiqc_email_size = params.max_multiqc_email_size as nextflow.util.MemoryUnit + def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? 
params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "$projectDir", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes() ] def sf = new File("$projectDir/assets/sendmail_template.txt") def sendmail_template = engine.createTemplate(sf).make(smail_fields) diff --git a/nf_core/pipeline-template/lib/WorkflowMain.groovy b/nf_core/pipeline-template/lib/WorkflowMain.groovy index 4cb7409fb9..5824dae2fb 100755 --- a/nf_core/pipeline-template/lib/WorkflowMain.groovy +++ b/nf_core/pipeline-template/lib/WorkflowMain.groovy @@ -20,44 +20,11 @@ class WorkflowMain { " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" } - // - // Generate help string - // - public static String help(workflow, params) { - {% if igenomes -%} - def command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" - {% else -%} - def command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --fasta reference.fa -profile docker" - {% endif -%} - def help_string = '' - help_string += NfcoreTemplate.logo(workflow, params.monochrome_logs) - help_string += NfcoreSchema.paramsHelp(workflow, params, command) - help_string += '\n' + citation(workflow) + '\n' - help_string += NfcoreTemplate.dashedLine(params.monochrome_logs) - return help_string - } - - // - // Generate parameter summary log string - // - public static String paramsSummaryLog(workflow, params) { - def summary_log = '' - summary_log += NfcoreTemplate.logo(workflow, params.monochrome_logs) - summary_log += NfcoreSchema.paramsSummaryLog(workflow, params) - summary_log += '\n' + citation(workflow) + '\n' - summary_log += NfcoreTemplate.dashedLine(params.monochrome_logs) - return summary_log - } // // Validate parameters and print summary to screen // public static void initialise(workflow, params, log) { - // Print help to screen 
if required - if (params.help) { - log.info help(workflow, params) - System.exit(0) - } // Print workflow version and exit on --version if (params.version) { @@ -66,14 +33,6 @@ class WorkflowMain { System.exit(0) } - // Print parameter summary log to screen - log.info paramsSummaryLog(workflow, params) - - // Validate workflow parameters via the JSON schema - if (params.validate_params) { - NfcoreSchema.validateParameters(workflow, params, log) - } - // Check that a -profile or Nextflow config has been provided to run the pipeline NfcoreTemplate.checkConfigProvided(workflow, log) @@ -90,8 +49,8 @@ class WorkflowMain { Nextflow.error("Please provide an input samplesheet to the pipeline e.g. '--input samplesheet.csv'") } } - {% if igenomes -%} + {%- if igenomes %} // // Get attribute from genome config file e.g. fasta // @@ -103,4 +62,5 @@ class WorkflowMain { } return null } -{% endif -%}} + {%- endif %} +} diff --git a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy b/nf_core/pipeline-template/lib/WorkflowPipeline.groovy index 6e577a669c..3a92e04d94 100755 --- a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy +++ b/nf_core/pipeline-template/lib/WorkflowPipeline.groovy @@ -11,7 +11,7 @@ class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { // Check and validate parameters // public static void initialise(params, log) { - {% if igenomes -%} +{% if igenomes %} genomeExistsError(params, log) {% endif %} @@ -47,15 +47,57 @@ class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { return yaml_file_text } - public static String methodsDescriptionText(run_workflow, mqc_methods_yaml) { + // + // Generate methods description for MultiQC + // + + public static String toolCitationText(params) { + + // TODO Optionally add in-text citation tools to this list. + // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Tool (Foo et al. 
2023)" : "", + // Uncomment function in methodsDescriptionText to render in MultiQC report + def citation_text = [ + "Tools used in the workflow included:", + "FastQC (Andrews 2010),", + "MultiQC (Ewels et al. 2016)", + "." + ].join(' ').trim() + + return citation_text + } + + public static String toolBibliographyText(params) { + + // TODO Optionally add bibliographic entries to this list. + // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "
  • Author (2023) Pub name, Journal, DOI
  • " : "", + // Uncomment function in methodsDescriptionText to render in MultiQC report + def reference_text = [ + "
  • Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).
  • ", + "
  • Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics , 32(19), 3047–3048. doi: /10.1093/bioinformatics/btw354
  • " + ].join(' ').trim() + + return reference_text + } + + public static String methodsDescriptionText(run_workflow, mqc_methods_yaml, params) { // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file def meta = [:] meta.workflow = run_workflow.toMap() meta["manifest_map"] = run_workflow.manifest.toMap() + // Pipeline DOI meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : "" meta["nodoi_text"] = meta.manifest_map.doi ? "": "
  • If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used.
  • " + // Tool references + meta["tool_citations"] = "" + meta["tool_bibliography"] = "" + + // TODO Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled! + //meta["tool_citations"] = toolCitationText(params).replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".") + //meta["tool_bibliography"] = toolBibliographyText(params) + + def methods_text = mqc_methods_yaml.text def engine = new SimpleTemplateEngine() diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 52d8b1bb38..73984cb8fc 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -7,7 +7,7 @@ {%- if branded %} Website: https://nf-co.re/{{ short_name }} Slack : https://nfcore.slack.com/channels/{{ short_name }} -{% endif -%} +{%- endif %} ---------------------------------------------------------------------------------------- */ @@ -27,6 +27,22 @@ params.fasta = WorkflowMain.getGenomeAttribute(params, 'fasta') ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ +include { validateParameters; paramsHelp } from 'plugin/nf-validation' + +// Print help message if needed +if (params.help) { + def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) + def citation = '\n' + WorkflowMain.citation(workflow) + '\n' + def String command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" + log.info logo + paramsHelp(command) + citation + NfcoreTemplate.dashedLine(params.monochrome_logs) + System.exit(0) +} + +// Validate input parameters +if (params.validate_params) { + validateParameters() +} + WorkflowMain.initialise(workflow, params, log) /* diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 4ef0fcd5e8..ae62a6ecac 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -13,12 +13,14 @@ params { // 
Input options input = null -{% if igenomes %} + {%- if igenomes %} // References genome = null igenomes_base = 's3://ngi-igenomes/igenomes' igenomes_ignore = false - {% endif -%} + {% else %} + fasta = null + {%- endif %} // MultiQC options multiqc_config = null @@ -29,7 +31,6 @@ params { // Boilerplate options outdir = null - tracedir = "${params.outdir}/pipeline_info" publish_dir_mode = 'copy' email = null email_on_fail = null @@ -38,26 +39,31 @@ params { hook_url = null help = false version = false - validate_params = true - show_hidden_params = false - schema_ignore_params = 'genomes' -{% if nf_core_configs %} // Config options + config_profile_name = null + config_profile_description = null + + {%- if nf_core_configs %} custom_config_version = 'master' custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" - config_profile_description = null config_profile_contact = null config_profile_url = null - config_profile_name = null + {% endif %} -{% endif %} // Max resource options // Defaults only, expecting to be overwritten max_memory = '128.GB' max_cpus = 16 max_time = '240.h' + // Schema validation default options + validationFailUnrecognisedParams = false + validationLenientMode = false + validationSchemaIgnoreParams = 'genomes' + validationShowHiddenParams = false + validate_params = true + } // Load base.config by default for all pipelines @@ -78,13 +84,13 @@ try { // } catch (Exception e) { // System.err.println("WARNING: Could not load nf-core/config/{{ short_name }} profiles: ${params.custom_config_base}/pipeline/{{ short_name }}.config") // } -{% endif %} +{% endif -%} profiles { debug { dumpHashes = true process.beforeScript = 'echo $HOSTNAME' - cleanup = false + cleanup = false } conda { conda.enabled = true @@ -107,7 +113,6 @@ profiles { } docker { docker.enabled = true - docker.registry = 'quay.io' docker.userEmulation = true conda.enabled = false singularity.enabled = false @@ -131,7 +136,6 @@ 
profiles { } podman { podman.enabled = true - podman.registry = 'quay.io' conda.enabled = false docker.enabled = false singularity.enabled = false @@ -175,14 +179,27 @@ profiles { test_full { includeConfig 'conf/test_full.config' } } -{% if igenomes %} +// Set default registry for Apptainer, Docker, Podman and Singularity independent of -profile +// Will not be used unless Apptainer / Docker / Podman / Singularity are enabled +// Set to your registry if you have a mirror of containers +apptainer.registry = 'quay.io' +docker.registry = 'quay.io' +podman.registry = 'quay.io' +singularity.registry = 'quay.io' + +// Nextflow plugins +plugins { + id 'nf-validation' // Validation of pipeline parameters and creation of an input channel from a sample sheet +} + +{% if igenomes -%} // Load igenomes.config if required if (!params.igenomes_ignore) { includeConfig 'conf/igenomes.config' } else { params.genomes = [:] } -{% endif %} +{% endif -%} // Export these variables to prevent local Python/R libraries from conflicting with those in the container // The JULIA depot path has been adjusted to a fixed path `/usr/local/share/julia` that needs to be used for packages in the container. 
@@ -201,19 +218,19 @@ process.shell = ['/bin/bash', '-euo', 'pipefail'] def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') timeline { enabled = true - file = "${params.tracedir}/execution_timeline_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_timeline_${trace_timestamp}.html" } report { enabled = true - file = "${params.tracedir}/execution_report_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_report_${trace_timestamp}.html" } trace { enabled = true - file = "${params.tracedir}/execution_trace_${trace_timestamp}.txt" + file = "${params.outdir}/pipeline_info/execution_trace_${trace_timestamp}.txt" } dag { enabled = true - file = "${params.tracedir}/pipeline_dag_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/pipeline_dag_${trace_timestamp}.html" } manifest { @@ -222,7 +239,7 @@ manifest { homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' - nextflowVersion = '!>=22.10.1' + nextflowVersion = '!>=23.04.0' version = '{{ version }}' doi = '' } diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 2743562d6c..319503c37f 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -15,9 +15,9 @@ "input": { "type": "string", "format": "file-path", + "exists": true, "mimetype": "text/csv", "pattern": "^\\S+\\.csv$", - "schema": "assets/schema_input.json", "description": "Path to comma-separated file containing information about the samples in the experiment.", "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. 
It has to be a comma-separated file with 3 columns, and a header row.{% if branded %} See [usage docs](https://nf-co.re/{{ short_name }}/usage#samplesheet-input).{% endif %}", "fa_icon": "fas fa-file-csv" @@ -57,6 +57,7 @@ "fasta": { "type": "string", "format": "file-path", + "exists": true, "mimetype": "text/plain", "pattern": "^\\S+\\.fn?a(sta)?(\\.gz)?$", "description": "Path to FASTA genome file.", @@ -157,7 +158,7 @@ "description": "Maximum amount of time that can be requested for any single job.", "default": "240.h", "fa_icon": "far fa-clock", - "pattern": "^(\\d+\\.?\\s*(s|m|h|day)\\s*)+$", + "pattern": "^(\\d+\\.?\\s*(s|m|h|d|day)\\s*)+$", "hidden": true, "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. `--max_time '2.h'`" } @@ -174,12 +175,14 @@ "type": "boolean", "description": "Display help text.", "fa_icon": "fas fa-question-circle", + "default": false, "hidden": true }, "version": { "type": "boolean", "description": "Display version and exit.", "fa_icon": "fas fa-question-circle", + "default": false, "hidden": true }, "publish_dir_mode": { @@ -203,6 +206,7 @@ "type": "boolean", "description": "Send plain-text email instead of HTML.", "fa_icon": "fas fa-remove-format", + "default": false, "hidden": true }, "max_multiqc_email_size": { @@ -217,6 +221,7 @@ "type": "boolean", "description": "Do not use coloured log outputs.", "fa_icon": "fas fa-palette", + "default": false, "hidden": true }, "hook_url": { @@ -228,6 +233,7 @@ }, "multiqc_config": { "type": "string", + "format": "file-path", "description": "Custom config file to supply to MultiQC.", "fa_icon": "fas fa-cog", "hidden": true @@ -243,13 +249,6 @@ "description": "Custom MultiQC yaml file containing HTML including a methods description.", "fa_icon": "fas fa-cog" }, - "tracedir": { - "type": "string", - "description": "Directory to keep pipeline Nextflow logs and reports.", - "default": 
"${params.outdir}/pipeline_info", - "fa_icon": "fas fa-cogs", - "hidden": true - }, "validate_params": { "type": "boolean", "description": "Boolean whether to validate parameters against the schema at runtime", @@ -257,12 +256,29 @@ "fa_icon": "fas fa-check-square", "hidden": true }, - "show_hidden_params": { + "validationShowHiddenParams": { "type": "boolean", "fa_icon": "far fa-eye-slash", "description": "Show all params when using `--help`", + "default": false, "hidden": true, "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." + }, + "validationFailUnrecognisedParams": { + "type": "boolean", + "fa_icon": "far fa-check-circle", + "description": "Validation of parameters fails when an unrecognised parameter is found.", + "default": false, + "hidden": true, + "help_text": "By default, when an unrecognised parameter is found, it returns a warinig." + }, + "validationLenientMode": { + "type": "boolean", + "fa_icon": "far fa-check-circle", + "description": "Validation of parameters in lenient more.", + "default": false, + "hidden": true, + "help_text": "Allows string values that are parseable as numbers or booleans. For further information see [JSONSchema docs](https://github.com/everit-org/json-schema#lenient-mode)." 
} } } diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 9bcc0086b5..9dfb0155f1 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -1,21 +1,19 @@ /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - VALIDATE INPUTS + PRINT PARAMS SUMMARY ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -def summary_params = NfcoreSchema.paramsSummaryMap(workflow, params) +include { paramsSummaryLog; paramsSummaryMap } from 'plugin/nf-validation' -// Validate input parameters -Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.initialise(params, log) +def logo = NfcoreTemplate.logo(workflow, params.monochrome_logs) +def citation = '\n' + WorkflowMain.citation(workflow) + '\n' +def summary_params = paramsSummaryMap(workflow) -// TODO nf-core: Add all file path parameters for the pipeline to the list below -// Check input path parameters to see if they exist -def checkPathParamList = [ params.input, params.multiqc_config, params.fasta ] -for (param in checkPathParamList) { if (param) { file(param, checkIfExists: true) } } +// Print parameter summary log to screen +log.info logo + paramsSummaryLog(workflow) + citation -// Check mandatory parameters -if (params.input) { ch_input = file(params.input) } else { exit 1, 'Input samplesheet not specified!' 
} +Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.initialise(params, log) /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -69,9 +67,12 @@ workflow {{ short_name|upper }} { // SUBWORKFLOW: Read in samplesheet, validate and stage input files // INPUT_CHECK ( - ch_input + file(params.input) ) ch_versions = ch_versions.mix(INPUT_CHECK.out.versions) + // TODO: OPTIONAL, you can use nf-validation plugin to create an input channel from the samplesheet with Channel.fromSamplesheet("input") + // See the documentation https://nextflow-io.github.io/nf-validation/samplesheets/fromSamplesheet/ + // ! There is currently no tooling to help you write a sample sheet schema // // MODULE: Run FastQC @@ -91,7 +92,7 @@ workflow {{ short_name|upper }} { workflow_summary = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.paramsSummaryMultiqc(workflow, summary_params) ch_workflow_summary = Channel.value(workflow_summary) - methods_description = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description) + methods_description = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description, params) ch_methods_description = Channel.value(methods_description) ch_multiqc_files = Channel.empty() diff --git a/nf_core/refgenie.py b/nf_core/refgenie.py index b666844699..6f09b75532 100644 --- a/nf_core/refgenie.py +++ b/nf_core/refgenie.py @@ -184,7 +184,7 @@ def update_config(rgc): # Save the updated genome config try: - with open(refgenie_genomes_config_file, "w") as fh: + with open(refgenie_genomes_config_file, "w+") as fh: fh.write(refgenie_genomes) log.info(f"Updated nf-core genomes config: {refgenie_genomes_config_file}") except FileNotFoundError: diff --git a/nf_core/schema.py b/nf_core/schema.py index ba88e762ea..b00697334b 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -238,6 +238,11 @@ def 
validate_default_params(self): jsonschema.validate(self.schema_defaults, schema_no_required) except jsonschema.exceptions.ValidationError as e: raise AssertionError(f"Default parameters are invalid: {e.message}") + for param, default in self.schema_defaults.items(): + if default in ("null", "", None, "None"): + log.warning( + f"[yellow][!] Default parameter '{param}' is empty or null. It is advisable to remove the default from the schema" + ) log.info("[green][✓] Default parameters match schema validation") # Make sure every default parameter exists in the nextflow.config and is of correct type @@ -245,8 +250,8 @@ def validate_default_params(self): self.get_wf_params() # Collect parameters to ignore - if "schema_ignore_params" in self.pipeline_params: - params_ignore = self.pipeline_params.get("schema_ignore_params", "").strip("\"'").split(",") + if "validationSchemaIgnoreParams" in self.pipeline_params: + params_ignore = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") else: params_ignore = [] @@ -759,8 +764,8 @@ def add_schema_found_configs(self): Add anything that's found in the Nextflow params that's missing in the pipeline schema """ params_added = [] - params_ignore = self.pipeline_params.get("schema_ignore_params", "").strip("\"'").split(",") - params_ignore.append("schema_ignore_params") + params_ignore = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",") + params_ignore.append("validationSchemaIgnoreParams") for p_key, p_val in self.pipeline_params.items(): # Check if key is in schema parameters if p_key not in self.schema_params and p_key not in params_ignore: diff --git a/nf_core/subworkflow-template/subworkflows/meta.yml b/nf_core/subworkflow-template/subworkflows/meta.yml index 4c5b454ddf..ae1689805d 100644 --- a/nf_core/subworkflow-template/subworkflows/meta.yml +++ b/nf_core/subworkflow-template/subworkflows/meta.yml @@ -17,7 +17,7 @@ input: type: map description: | Groovy Map 
containing sample information - e.g. [ id:'test' ] + e.g. `[ id:'test' ]` - bam: type: file description: BAM/CRAM/SAM file @@ -28,7 +28,7 @@ output: type: map description: | Groovy Map containing sample information - e.g. [ id:'test' ] + e.g. `[ id:'test' ]` - bam: type: file description: Sorted BAM/CRAM/SAM file diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py new file mode 100644 index 0000000000..f78142c031 --- /dev/null +++ b/nf_core/synced_repo.py @@ -0,0 +1,418 @@ +import filecmp +import logging +import os +import shutil +from pathlib import Path + +import git +import rich +import rich.progress +from git.exc import GitCommandError + +from nf_core.utils import load_tools_config + +log = logging.getLogger(__name__) + +# Constants for the nf-core/modules repo used throughout the module files +NF_CORE_MODULES_NAME = "nf-core" +NF_CORE_MODULES_REMOTE = "https://github.com/nf-core/modules.git" +NF_CORE_MODULES_DEFAULT_BRANCH = "master" + + +class RemoteProgressbar(git.RemoteProgress): + """ + An object to create a progressbar for when doing an operation with the remote. + Note that an initialized rich Progress (progress bar) object must be passed + during initialization. + """ + + def __init__(self, progress_bar, repo_name, remote_url, operation): + """ + Initializes the object and adds a task to the progressbar passed as 'progress_bar' + + Args: + progress_bar (rich.progress.Progress): A rich progress bar object + repo_name (str): Name of the repository the operation is performed on + remote_url (str): Git URL of the repository the operation is performed on + operation (str): The operation performed on the repository, i.e. 'Pulling', 'Cloning' etc. 
+ """ + super().__init__() + self.progress_bar = progress_bar + self.tid = self.progress_bar.add_task( + f"{operation} from [bold green]'{repo_name}'[/bold green] ([link={remote_url}]{remote_url}[/link])", + start=False, + state="Waiting for response", + ) + + def update(self, op_code, cur_count, max_count=None, message=""): + """ + Overrides git.RemoteProgress.update. + Called every time there is a change in the remote operation + """ + if not self.progress_bar.tasks[self.tid].started: + self.progress_bar.start_task(self.tid) + self.progress_bar.update( + self.tid, total=max_count, completed=cur_count, state=f"{cur_count / max_count * 100:.1f}%" + ) + + +class SyncedRepo: + """ + An object to store details about a locally cached code repository. + """ + + local_repo_statuses = {} + no_pull_global = False + + @staticmethod + def local_repo_synced(repo_name): + """ + Checks whether a local repo has been cloned/pull in the current session + """ + return SyncedRepo.local_repo_statuses.get(repo_name, False) + + @staticmethod + def update_local_repo_status(repo_name, up_to_date): + """ + Updates the clone/pull status of a local repo + """ + SyncedRepo.local_repo_statuses[repo_name] = up_to_date + + @staticmethod + def get_remote_branches(remote_url): + """ + Get all branches from a remote repository + + Args: + remote_url (str): The git url to the remote repository + + Returns: + (set[str]): All branches found in the remote + """ + try: + unparsed_branches = git.Git().ls_remote(remote_url) + except git.GitCommandError: + raise LookupError(f"Was unable to fetch branches from '{remote_url}'") + else: + branches = {} + for branch_info in unparsed_branches.split("\n"): + sha, name = branch_info.split("\t") + if name != "HEAD": + # The remote branches are shown as 'ref/head/branch' + branch_name = Path(name).stem + branches[sha] = branch_name + return set(branches.values()) + + def __init__(self, remote_url=None, branch=None, no_pull=False, hide_progress=False): + """ + 
Initializes the object and clones the git repository if it is not already present + """ + + # This allows us to set this one time and then keep track of the user's choice + SyncedRepo.no_pull_global |= no_pull + + # Check if the remote seems to be well formed + if remote_url is None: + remote_url = NF_CORE_MODULES_REMOTE + + self.remote_url = remote_url + + self.fullname = nf_core.modules.modules_utils.repo_full_name_from_remote(self.remote_url) + + self.setup_local_repo(remote_url, branch, hide_progress) + + config_fn, repo_config = load_tools_config(self.local_repo_dir) + try: + self.repo_path = repo_config["org_path"] + except KeyError: + raise UserWarning(f"'org_path' key not present in {config_fn.name}") + + # Verify that the repo seems to be correctly configured + if self.repo_path != NF_CORE_MODULES_NAME or self.branch: + self.verify_branch() + + # Convenience variable + self.modules_dir = os.path.join(self.local_repo_dir, "modules", self.repo_path) + self.subworkflows_dir = os.path.join(self.local_repo_dir, "subworkflows", self.repo_path) + + self.avail_module_names = None + + def verify_sha(self, prompt, sha): + """ + Verify that 'sha' and 'prompt' arguments are not provided together. + Verify that the provided SHA exists in the repo. + + Arguments: + prompt (bool): prompt asking for SHA + sha (str): provided sha + """ + if prompt and sha is not None: + log.error("Cannot use '--sha' and '--prompt' at the same time!") + return False + + if sha: + if not self.sha_exists_on_branch(sha): + log.error(f"Commit SHA '{sha}' doesn't exist in '{self.remote_url}'") + return False + + return True + + def setup_branch(self, branch): + """ + Verify that we have a branch and otherwise use the default one. + The branch is then checked out to verify that it exists in the repo. 
+ + Args: + branch (str): Name of branch + """ + if branch is None: + # Don't bother fetching default branch if we're using nf-core + if self.remote_url == NF_CORE_MODULES_REMOTE: + self.branch = "master" + else: + self.branch = self.get_default_branch() + else: + self.branch = branch + + # Verify that the branch exists by checking it out + self.branch_exists() + + def get_default_branch(self): + """ + Gets the default branch for the repo (the branch origin/HEAD is pointing to) + """ + origin_head = next(ref for ref in self.repo.refs if ref.name == "origin/HEAD") + _, branch = origin_head.ref.name.split("/") + return branch + + def branch_exists(self): + """ + Verifies that the branch exists in the repository by trying to check it out + """ + try: + self.checkout_branch() + except GitCommandError: + raise LookupError(f"Branch '{self.branch}' not found in '{self.remote_url}'") + + def verify_branch(self): + """ + Verifies the active branch conforms to the correct directory structure + """ + dir_names = os.listdir(self.local_repo_dir) + if "modules" not in dir_names: + err_str = f"Repository '{self.remote_url}' ({self.branch}) does not contain the 'modules/' directory" + if "software" in dir_names: + err_str += ( + ".\nAs of nf-core/tools version 2.0, the 'software/' directory should be renamed to 'modules/'" + ) + raise LookupError(err_str) + + def checkout_branch(self): + """ + Checks out the specified branch of the repository + """ + self.repo.git.checkout(self.branch) + + def checkout(self, commit): + """ + Checks out the repository at the requested commit + + Args: + commit (str): Git SHA of the commit + """ + self.repo.git.checkout(commit) + + def component_exists(self, component_name, component_type, checkout=True, commit=None): + """ + Check if a module/subworkflow exists in the branch of the repo + + Args: + component_name (str): The name of the module/subworkflow + + Returns: + (bool): Whether the module/subworkflow exists in this branch of the repository + 
""" + return component_name in self.get_avail_components(component_type, checkout=checkout, commit=commit) + + def get_component_dir(self, component_name, component_type): + """ + Returns the file path of a module/subworkflow directory in the repo. + Does not verify that the path exists. + Args: + component_name (str): The name of the module/subworkflow + + Returns: + component_path (str): The path of the module/subworkflow in the local copy of the repository + """ + if component_type == "modules": + return os.path.join(self.modules_dir, component_name) + elif component_type == "subworkflows": + return os.path.join(self.subworkflows_dir, component_name) + + def install_component(self, component_name, install_dir, commit, component_type): + """ + Install the module/subworkflow files into a pipeline at the given commit + + Args: + component_name (str): The name of the module/subworkflow + install_dir (str): The path where the module/subworkflow should be installed + commit (str): The git SHA for the version of the module/subworkflow to be installed + + Returns: + (bool): Whether the operation was successful or not + """ + # Check out the repository at the requested ref + try: + self.checkout(commit) + except git.GitCommandError: + return False + + # Check if the module/subworkflow exists in the branch + if not self.component_exists(component_name, component_type, checkout=False): + log.error( + f"The requested {component_type[:-1]} does not exists in the branch '{self.branch}' of {self.remote_url}'" + ) + return False + + # Copy the files from the repo to the install folder + shutil.copytree(self.get_component_dir(component_name, component_type), Path(install_dir, component_name)) + + # Switch back to the tip of the branch + self.checkout_branch() + return True + + def module_files_identical(self, module_name, base_path, commit): + """ + Checks whether the module files in a pipeline are identical to the ones in the remote + Args: + module_name (str): The name of the 
module + base_path (str): The path to the module in the pipeline + + Returns: + (bool): Whether the pipeline files are identical to the repo files + """ + if commit is None: + self.checkout_branch() + else: + self.checkout(commit) + module_files = ["main.nf", "meta.yml"] + files_identical = {file: True for file in module_files} + module_dir = self.get_component_dir(module_name, "modules") + for file in module_files: + try: + files_identical[file] = filecmp.cmp(os.path.join(module_dir, file), os.path.join(base_path, file)) + except FileNotFoundError: + log.debug(f"Could not open file: {os.path.join(module_dir, file)}") + continue + self.checkout_branch() + return files_identical + + def get_component_git_log(self, component_name, component_type, depth=None): + """ + Fetches the commit history the of requested module/subworkflow since a given date. The default value is + not arbitrary - it is the last time the structure of the nf-core/modules repository was had an + update breaking backwards compatibility. 
+ Args: + component_name (str): Name of module/subworkflow + modules_repo (SyncedRepo): A SyncedRepo object configured for the repository in question + + Returns: + ( dict ): Iterator of commit SHAs and associated (truncated) message + """ + self.checkout_branch() + component_path = os.path.join(component_type, self.repo_path, component_name) + commits_new = self.repo.iter_commits(max_count=depth, paths=component_path) + commits_new = [ + {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_new + ] + commits_old = [] + if component_type == "modules": + # Grab commits also from previous modules structure + component_path = os.path.join("modules", component_name) + commits_old = self.repo.iter_commits(max_count=depth, paths=component_path) + commits_old = [ + {"git_sha": commit.hexsha, "trunc_message": commit.message.partition("\n")[0]} for commit in commits_old + ] + commits = iter(commits_new + commits_old) + return commits + + def get_latest_component_version(self, component_name, component_type): + """ + Returns the latest commit in the repository + """ + return list(self.get_component_git_log(component_name, component_type, depth=1))[0]["git_sha"] + + def sha_exists_on_branch(self, sha): + """ + Verifies that a given commit sha exists on the branch + """ + self.checkout_branch() + return sha in (commit.hexsha for commit in self.repo.iter_commits()) + + def get_commit_info(self, sha): + """ + Fetches metadata about the commit (dates, message, etc.) 
+ Args: + commit_sha (str): The SHA of the requested commit + Returns: + message (str): The commit message for the requested commit + date (str): The commit date for the requested commit + Raises: + LookupError: If the search for the commit fails + """ + self.checkout_branch() + for commit in self.repo.iter_commits(): + if commit.hexsha == sha: + message = commit.message.partition("\n")[0] + date_obj = commit.committed_datetime + date = str(date_obj.date()) + return message, date + raise LookupError(f"Commit '{sha}' not found in the '{self.remote_url}'") + + def get_avail_components(self, component_type, checkout=True, commit=None): + """ + Gets the names of the modules/subworkflows in the repository. They are detected by + checking which directories have a 'main.nf' file + + Returns: + ([ str ]): The module/subworkflow names + """ + if checkout: + self.checkout_branch() + if commit is not None: + self.checkout(commit) + # Get directory + if component_type == "modules": + directory = self.modules_dir + elif component_type == "subworkflows": + directory = self.subworkflows_dir + # Module/Subworkflow directories are characterized by having a 'main.nf' file + avail_component_names = [ + os.path.relpath(dirpath, start=directory) + for dirpath, _, file_names in os.walk(directory) + if "main.nf" in file_names + ] + return avail_component_names + + def get_meta_yml(self, component_type, module_name): + """ + Returns the contents of the 'meta.yml' file of a module + + Args: + module_name (str): The name of the module + + Returns: + (str): The contents of the file in text format + """ + self.checkout_branch() + if component_type == "modules": + path = Path(self.modules_dir, module_name, "meta.yml") + elif component_type == "subworkflows": + path = Path(self.subworkflows_dir, module_name, "meta.yml") + else: + raise ValueError(f"Invalid component type: {component_type}") + if not path.exists(): + return None + with open(path) as fh: + contents = fh.read() + return contents 
diff --git a/nf_core/utils.py b/nf_core/utils.py index 36c39db50a..31738edabe 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1,6 +1,7 @@ """ Common utility functions for the nf-core python package. """ +import concurrent.futures import datetime import errno import hashlib @@ -58,13 +59,19 @@ NFCORE_DIR = os.path.join(os.environ.get("XDG_CONFIG_HOME", os.path.join(os.getenv("HOME"), ".config")), "nfcore") +def fetch_remote_version(source_url): + response = requests.get(source_url, timeout=3) + remote_version = re.sub(r"[^0-9\.]", "", response.text) + return remote_version + + def check_if_outdated(current_version=None, remote_version=None, source_url="https://nf-co.re/tools_version"): """ Check if the current version of nf-core is outdated """ # Exit immediately if disabled via ENV var if os.environ.get("NFCORE_NO_VERSION_CHECK", False): - return True + return (True, "", "") # Set and clean up the current version string if current_version is None: current_version = nf_core.__version__ @@ -72,12 +79,18 @@ def check_if_outdated(current_version=None, remote_version=None, source_url="htt # Build the URL to check against source_url = os.environ.get("NFCORE_VERSION_URL", source_url) source_url = f"{source_url}?v={current_version}" - # Fetch and clean up the remote version - if remote_version is None: - response = requests.get(source_url, timeout=3) - remote_version = re.sub(r"[^0-9\.]", "", response.text) - # Check if we have an available update - is_outdated = Version(remote_version) > Version(current_version) + # check if we have a newer version without blocking the rest of the script + is_outdated = False + if remote_version is None: # we set it manually for tests + try: + with concurrent.futures.ThreadPoolExecutor() as executor: + future = executor.submit(fetch_remote_version, source_url) + remote_version = future.result() + except Exception as e: + log.debug(f"Could not check for nf-core updates: {e}") + if remote_version is not None: + if 
Version(remote_version) > Version(current_version): + is_outdated = True return (is_outdated, current_version, remote_version) @@ -245,7 +258,7 @@ def fetch_wf_config(wf_path, cache_config=True): if cache_basedir and cache_fn: cache_path = os.path.join(cache_basedir, cache_fn) - if os.path.isfile(cache_path): + if os.path.isfile(cache_path) and cache_config is True: log.debug(f"Found a config cache, loading: {cache_path}") with open(cache_path, "r") as fh: try: @@ -261,7 +274,7 @@ def fetch_wf_config(wf_path, cache_config=True): ul = l.decode("utf-8") try: k, v = ul.split(" = ", 1) - config[k] = v + config[k] = v.strip("'\"") except ValueError: log.debug(f"Couldn't find key=value config pair:\n {ul}") @@ -823,34 +836,65 @@ def prompt_remote_pipeline_name(wfs): raise AssertionError(f"Not able to find pipeline '{pipeline}'") -def prompt_pipeline_release_branch(wf_releases, wf_branches): +def prompt_pipeline_release_branch(wf_releases, wf_branches, multiple=False): """Prompt for pipeline release / branch Args: wf_releases (array): Array of repo releases as returned by the GitHub API wf_branches (array): Array of repo branches, as returned by the GitHub API + multiple (bool): Allow selection of multiple releases & branches (for Tower) Returns: choice (str): Selected release / branch name """ - # Prompt user for release tag + # Prompt user for release tag, tag_set will contain all available. 
choices = [] + tag_set = [] # Releases if len(wf_releases) > 0: for tag in map(lambda release: release.get("tag_name"), wf_releases): tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")] choices.append(questionary.Choice(title=tag_display, value=tag)) + tag_set.append(tag) # Branches for branch in wf_branches.keys(): branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")] choices.append(questionary.Choice(title=branch_display, value=branch)) + tag_set.append(branch) if len(choices) == 0: return False - return questionary.select("Select release / branch:", choices=choices, style=nfcore_question_style).unsafe_ask() + if multiple: + return ( + questionary.checkbox("Select release / branch:", choices=choices, style=nfcore_question_style).unsafe_ask(), + tag_set, + ) + + else: + return ( + questionary.select("Select release / branch:", choices=choices, style=nfcore_question_style).unsafe_ask(), + tag_set, + ) + + +class SingularityCacheFilePathValidator(questionary.Validator): + """ + Validator for file path specified as --singularity-cache-index argument in nf-core download + """ + + def validate(self, value): + if len(value.text): + if os.path.isfile(value.text): + return True + else: + raise questionary.ValidationError( + message="Invalid remote cache index file", cursor_position=len(value.text) + ) + else: + return True def get_repo_releases_branches(pipeline, wfs): diff --git a/setup.py b/setup.py index 9b0d9fb6af..8e384ef21a 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "2.8" +version = "2.9" with open("README.md") as f: readme = f.read() diff --git a/tests/data/mock_config_containers/nextflow.config b/tests/data/mock_config_containers/nextflow.config new file mode 100644 index 0000000000..a761121746 --- /dev/null +++ b/tests/data/mock_config_containers/nextflow.config @@ -0,0 +1,29 @@ + + +// example from methylseq 1.0 +params.container = 
'nfcore/methylseq:1.0' + +// example from methylseq 1.4 [Mercury Rattlesnake] +process.container = 'nfcore/methylseq:1.4' + +process { + + // example from Sarek 2.5 + + withName:Snpeff { + container = {(params.annotation_cache && params.snpEff_cache) ? 'nfcore/sarek:dev' : "nfcore/sareksnpeff:dev.${params.genome}"} + errorStrategy = {task.exitStatus == 143 ? 'retry' : 'ignore'} + } + withLabel:VEP { + container = {(params.annotation_cache && params.vep_cache) ? 'nfcore/sarek:dev' : "nfcore/sarekvep:dev.${params.genome}"} + errorStrategy = {task.exitStatus == 143 ? 'retry' : 'ignore'} + } + + // example from differentialabundance 1.2.0 + + withName: RMARKDOWNNOTEBOOK { + conda = "bioconda::r-shinyngs=1.7.1" + container = { "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/r-shinyngs:1.7.1--r42hdfd78af_1':'quay.io/biocontainers/r-shinyngs:1.7.1--r42hdfd78af_1' }" } + } + +} diff --git a/tests/data/pipeline_create_template_skip.yml b/tests/data/pipeline_create_template_skip.yml new file mode 100644 index 0000000000..b69175e0bb --- /dev/null +++ b/tests/data/pipeline_create_template_skip.yml @@ -0,0 +1,7 @@ +prefix: testprefix +skip: + - github + - ci + - github_badges + - igenomes + - nf_core_configs diff --git a/tests/data/testdata_remote_containers.txt b/tests/data/testdata_remote_containers.txt new file mode 100644 index 0000000000..93cf46f2f6 --- /dev/null +++ b/tests/data/testdata_remote_containers.txt @@ -0,0 +1,37 @@ +./depot.galaxyproject.org-singularity-bbmap-38.93--he522d1c_0.img +./depot.galaxyproject.org-singularity-bedtools-2.30.0--hc088bd4_0.img +./depot.galaxyproject.org-singularity-bioconductor-dupradar-1.18.0--r40_1.img +./depot.galaxyproject.org-singularity-bioconductor-summarizedexperiment-1.20.0--r40_0.img +./depot.galaxyproject.org-singularity-bioconductor-tximeta-1.8.0--r40_0.img +./depot.galaxyproject.org-singularity-fastqc-0.11.9--0.img 
+./depot.galaxyproject.org-singularity-gffread-0.12.1--h8b12597_0.img +./depot.galaxyproject.org-singularity-hisat2-2.2.1--h1b792b2_3.img +./depot.galaxyproject.org-singularity-mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2-59cdd445419f14abac76b31dd0d71217994cbcc9-0.img +./depot.galaxyproject.org-singularity-mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2-afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0.img +./depot.galaxyproject.org-singularity-mulled-v2-8849acf39a43cdd6c839a369a74c0adc823e2f91-ab110436faf952a33575c64dd74615a84011450b-0.img +./depot.galaxyproject.org-singularity-mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1-0e773bb207600fcb4d38202226eb20a33c7909b6-0.img +./depot.galaxyproject.org-singularity-mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1-38aed4501da19db366dc7c8d52d31d94e760cfaf-0.img +./depot.galaxyproject.org-singularity-mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b-64aad4a4e144878400649e71f42105311be7ed87-0.img +./depot.galaxyproject.org-singularity-multiqc-1.11--pyhdfd78af_0.img +./depot.galaxyproject.org-singularity-multiqc-1.13--pyhdfd78af_0.img +./depot.galaxyproject.org-singularity-perl-5.26.2.img +./depot.galaxyproject.org-singularity-picard-2.26.10--hdfd78af_0.img +./depot.galaxyproject.org-singularity-picard-2.27.4--hdfd78af_0.img +./depot.galaxyproject.org-singularity-preseq-3.1.2--h445547b_2.img +./depot.galaxyproject.org-singularity-python-3.9--1.img +./depot.galaxyproject.org-singularity-qualimap-2.2.2d--1.img +./depot.galaxyproject.org-singularity-rseqc-3.0.1--py37h516909a_1.img +./depot.galaxyproject.org-singularity-salmon-1.5.2--h84f40af_0.img +./depot.galaxyproject.org-singularity-samtools-1.15.1--h1170115_0.img +./depot.galaxyproject.org-singularity-sortmerna-4.3.4--h9ee0642_0.img +./depot.galaxyproject.org-singularity-stringtie-2.2.1--hecb563c_2.img +./depot.galaxyproject.org-singularity-subread-2.0.1--hed695b0_0.img +./depot.galaxyproject.org-singularity-trim-galore-0.6.7--hdfd78af_0.img 
+./depot.galaxyproject.org-singularity-ubuntu-20.04.img +./depot.galaxyproject.org-singularity-ucsc-bedclip-377--h0b8a92a_2.img +./depot.galaxyproject.org-singularity-ucsc-bedgraphtobigwig-377--h446ed27_1.img +./depot.galaxyproject.org-singularity-umi_tools-1.1.2--py38h4a8c8d9_0.img +These entries should not be used: +On October 5, 2011, the 224-meter containership MV Rena struck a reef close to New Zealand’s coast and broke apart. That spells disaster, no? +MV Rena + diff --git a/tests/modules/lint.py b/tests/modules/lint.py index b7aaf610ca..d31f2c3212 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -82,6 +82,21 @@ def test_modules_lint_multiple_remotes(self): assert len(module_lint.warned) >= 0 +def test_modules_lint_registry(self): + """Test linting the samtools module and alternative registry""" + self.mods_install.install("samtools") + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, registry="public.ecr.aws") + module_lint.lint(print_results=False, module="samtools") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir) + module_lint.lint(print_results=False, module="samtools") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) > 0 + assert len(module_lint.warned) >= 0 + + def test_modules_lint_patched_modules(self): """ Test creating a patch file and applying it to a new version of the the files diff --git a/tests/modules/patch.py b/tests/modules/patch.py index 95cc2cad95..338d890f2f 100644 --- a/tests/modules/patch.py +++ b/tests/modules/patch.py @@ -18,7 +18,7 @@ """ ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" -CORRECT_SHA = "0245a9277d51a47c8aa68d264d294cf45312fab8" +CORRECT_SHA = "1dff30bfca2d98eb7ac7b09269a15e822451d99f" SUCCEED_SHA = 
"ba15c20c032c549d77c5773659f19c2927daf48e" FAIL_SHA = "67b642d4471c4005220a342cad3818d5ba2b5a73" BISMARK_ALIGN = "bismark/align" diff --git a/tests/test_cli.py b/tests/test_cli.py index 58c4525a76..57e909e575 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -21,7 +21,7 @@ def test_header(mock_cli): @mock.patch("nf_core.__main__.nf_core_cli") -@mock.patch("nf_core.utils.check_if_outdated", return_value=(True, None, "dummy_version")) +@mock.patch("nf_core.__main__.check_if_outdated", return_value=(True, None, "dummy_version")) def test_header_outdated(mock_check_outdated, mock_nf_core_cli, capsys): """Check cli notifies the user when nf_core is outdated""" nf_core.__main__.run_nf_core() @@ -165,8 +165,12 @@ def test_cli_download(self, mock_dl): "outdir": "/path/outdir", "compress": "tar.gz", "force": None, - "container": "singularity", - "singularity-cache-only": None, + "tower": None, + "download-configuration": None, + "container-system": "singularity", + "container-library": "quay.io", + "container-cache-utilisation": "copy", + "container-cache-index": "/path/index.txt", "parallel-downloads": 2, } @@ -177,12 +181,16 @@ def test_cli_download(self, mock_dl): mock_dl.assert_called_once_with( cmd[-1], - params["revision"], + (params["revision"],), params["outdir"], params["compress"], "force" in params, - params["container"], - "singularity-cache-only" in params, + "tower" in params, + "download-configuration" in params, + params["container-system"], + (params["container-library"],), + params["container-cache-utilisation"], + params["container-cache-index"], params["parallel-downloads"], ) diff --git a/tests/test_create.py b/tests/test_create.py index cc6bf8ba47..298eeecde5 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -13,6 +13,7 @@ TEST_DATA_DIR = Path(__file__).parent / "data" PIPELINE_TEMPLATE_YML = TEST_DATA_DIR / "pipeline_create_template.yml" +PIPELINE_TEMPLATE_YML_SKIP = TEST_DATA_DIR / "pipeline_create_template_skip.yml" class 
NfcoreCreateTest(unittest.TestCase): @@ -107,3 +108,31 @@ def test_pipeline_creation_initiation_customize_template(self, mock_questionary, assert os.path.exists(pipeline_template) with open(pipeline_template) as fh: assert fh.read() == PIPELINE_TEMPLATE_YML.read_text() + + @with_temporary_folder + def test_pipeline_creation_with_yml_skip(self, tmp_path): + pipeline = nf_core.create.PipelineCreate( + name=self.pipeline_name, + description=self.pipeline_description, + author=self.pipeline_author, + version=self.pipeline_version, + no_git=False, + force=True, + outdir=tmp_path, + template_yaml_path=PIPELINE_TEMPLATE_YML_SKIP, + plain=True, + default_branch=self.default_branch, + ) + pipeline.init_pipeline() + assert not os.path.isdir(os.path.join(pipeline.outdir, ".git")) + + # Check pipeline yml has been dumped and matches input + pipeline_template = os.path.join(pipeline.outdir, "pipeline_template.yml") + assert os.path.exists(pipeline_template) + with open(pipeline_template) as fh: + assert fh.read() == PIPELINE_TEMPLATE_YML_SKIP.read_text() + + # Check that some of the skipped files are not present + assert not os.path.exists(os.path.join(pipeline.outdir, "CODE_OF_CONDUCT.md")) + assert not os.path.exists(os.path.join(pipeline.outdir, ".github")) + assert not os.path.exists(os.path.join(pipeline.outdir, "conf", "igenomes.config")) diff --git a/tests/test_download.py b/tests/test_download.py index e2ae882394..dd226d9dae 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -3,16 +3,20 @@ import hashlib import os +import re import shutil import tempfile import unittest +from pathlib import Path from unittest import mock import pytest import nf_core.create import nf_core.utils -from nf_core.download import DownloadWorkflow +from nf_core.download import ContainerError, DownloadWorkflow, WorkflowRepo +from nf_core.synced_repo import SyncedRepo +from nf_core.utils import NFCORE_CACHE_DIR, NFCORE_DIR, nextflow_cmd from .utils import with_temporary_file, 
with_temporary_folder @@ -32,10 +36,10 @@ def test_get_release_hash_release(self): download_obj.wf_branches, ) = nf_core.utils.get_repo_releases_branches(pipeline, wfs) download_obj.get_revision_hash() - assert download_obj.wf_sha == "b3e5e3b95aaf01d98391a62a10a3990c0a4de395" - assert download_obj.outdir == "nf-core-methylseq-1.6" + assert download_obj.wf_sha[download_obj.revision[0]] == "b3e5e3b95aaf01d98391a62a10a3990c0a4de395" + assert download_obj.outdir == "nf-core-methylseq_1.6" assert ( - download_obj.wf_download_url + download_obj.wf_download_url[download_obj.revision[0]] == "https://github.com/nf-core/methylseq/archive/b3e5e3b95aaf01d98391a62a10a3990c0a4de395.zip" ) @@ -51,10 +55,10 @@ def test_get_release_hash_branch(self): download_obj.wf_branches, ) = nf_core.utils.get_repo_releases_branches(pipeline, wfs) download_obj.get_revision_hash() - assert download_obj.wf_sha == "819cbac792b76cf66c840b567ed0ee9a2f620db7" - assert download_obj.outdir == "nf-core-exoseq-dev" + assert download_obj.wf_sha[download_obj.revision[0]] == "819cbac792b76cf66c840b567ed0ee9a2f620db7" + assert download_obj.outdir == "nf-core-exoseq_dev" assert ( - download_obj.wf_download_url + download_obj.wf_download_url[download_obj.revision[0]] == "https://github.com/nf-core/exoseq/archive/819cbac792b76cf66c840b567ed0ee9a2f620db7.zip" ) @@ -78,12 +82,16 @@ def test_get_release_hash_non_existent_release(self): def test_download_wf_files(self, outdir): download_obj = DownloadWorkflow(pipeline="nf-core/methylseq", revision="1.6") download_obj.outdir = outdir - download_obj.wf_sha = "b3e5e3b95aaf01d98391a62a10a3990c0a4de395" - download_obj.wf_download_url = ( - "https://github.com/nf-core/methylseq/archive/b3e5e3b95aaf01d98391a62a10a3990c0a4de395.zip" + download_obj.wf_sha = {"1.6": "b3e5e3b95aaf01d98391a62a10a3990c0a4de395"} + download_obj.wf_download_url = { + "1.6": "https://github.com/nf-core/methylseq/archive/b3e5e3b95aaf01d98391a62a10a3990c0a4de395.zip" + } + rev = 
download_obj.download_wf_files( + download_obj.revision[0], + download_obj.wf_sha[download_obj.revision[0]], + download_obj.wf_download_url[download_obj.revision[0]], ) - download_obj.download_wf_files() - assert os.path.exists(os.path.join(outdir, "workflow", "main.nf")) + assert os.path.exists(os.path.join(outdir, rev, "main.nf")) # # Tests for 'download_configs' @@ -118,30 +126,64 @@ def test_wf_use_local_configs(self, tmp_path): download_obj.download_configs() # Test the function - download_obj.wf_use_local_configs() + download_obj.wf_use_local_configs("workflow") wf_config = nf_core.utils.fetch_wf_config(os.path.join(test_outdir, "workflow"), cache_config=False) - assert wf_config["params.custom_config_base"] == f"'{test_outdir}/workflow/../configs/'" + assert wf_config["params.custom_config_base"] == f"{test_outdir}/workflow/../configs/" # # Tests for 'find_container_images' # @with_temporary_folder @mock.patch("nf_core.utils.fetch_wf_config") - def test_find_container_images(self, tmp_path, mock_fetch_wf_config): + def test_find_container_images_config_basic(self, tmp_path, mock_fetch_wf_config): download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) mock_fetch_wf_config.return_value = { "process.mapping.container": "cutting-edge-container", "process.nocontainer": "not-so-cutting-edge", } - download_obj.find_container_images() + download_obj.find_container_images("workflow") assert len(download_obj.containers) == 1 assert download_obj.containers[0] == "cutting-edge-container" + # + # Test for 'find_container_images' in config with nextflow + # + @pytest.mark.skipif( + shutil.which("nextflow") is None, + reason="Can't run test that requires nextflow to run if not installed.", + ) + @with_temporary_folder + @mock.patch("nf_core.utils.fetch_wf_config") + def test__find_container_images_config_nextflow(self, tmp_path, mock_fetch_wf_config): + download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) + nfconfig_raw = nextflow_cmd( + 
f"nextflow config -flat {Path(__file__).resolve().parent / 'data/mock_config_containers'}" + ) + config = {} + for l in nfconfig_raw.splitlines(): + ul = l.decode("utf-8") + try: + k, v = ul.split(" = ", 1) + config[k] = v.strip("'\"") + except ValueError: + pass + mock_fetch_wf_config.return_value = config + download_obj.find_container_images("workflow") + assert len(download_obj.containers) == 4 + assert "nfcore/methylseq:1.0" in download_obj.containers + assert "nfcore/methylseq:1.4" in download_obj.containers + assert "nfcore/sarek:dev" in download_obj.containers + assert "https://depot.galaxyproject.org/singularity/r-shinyngs:1.7.1--r42hdfd78af_1" in download_obj.containers + # does not yet pick up nfcore/sarekvep:dev.${params.genome}, because successfully detecting "nfcore/sarek:dev" + # breaks the loop already. However, this loop-breaking is needed to stop iterating over DSL2 syntax if a + # direct download link has been found. Unless we employ a better deduplication, support for this kind of + # free-style if-else switches will sadly remain insufficient. + # # Tests for 'singularity_pull_image' # - # If Singularity is installed, but the container can't be accessed because it does not exist or there are aceess - # restrictions, a FileNotFoundError is raised due to the unavailability of the image. + # If Singularity is installed, but the container can't be accessed because it does not exist or there are access + # restrictions, a RuntimeWarning is raised due to the unavailability of the image. 
@pytest.mark.skipif( shutil.which("singularity") is None, reason="Can't test what Singularity does if it's not installed.", @@ -150,20 +192,132 @@ def test_find_container_images(self, tmp_path, mock_fetch_wf_config): @mock.patch("rich.progress.Progress.add_task") def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_progress): download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_dir) - with pytest.raises(FileNotFoundError): - download_obj.singularity_pull_image("a-container", tmp_dir, None, mock_rich_progress) - # If Singularity is not installed, it raises a FileNotFoundError because the singularity command can't be found. + # Test successful pull + download_obj.singularity_pull_image( + "hello-world", f"{tmp_dir}/hello-world.sif", None, "docker.io", mock_rich_progress + ) + + # Pull again, but now the image already exists + with pytest.raises(ContainerError.ImageExists): + download_obj.singularity_pull_image( + "hello-world", f"{tmp_dir}/hello-world.sif", None, "docker.io", mock_rich_progress + ) + + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExists is raised before attempting to pull.) 
+ with pytest.raises(ContainerError.RegistryNotFound): + download_obj.singularity_pull_image( + "hello-world", + f"{tmp_dir}/hello-world_new.sif", + None, + "register-this-domain-to-break-the-test.io", + mock_rich_progress, + ) + + # test Image not found for several registries + with pytest.raises(ContainerError.ImageNotFound): + download_obj.singularity_pull_image( + "a-container", f"{tmp_dir}/acontainer.sif", None, "quay.io", mock_rich_progress + ) + + with pytest.raises(ContainerError.ImageNotFound): + download_obj.singularity_pull_image( + "a-container", f"{tmp_dir}/acontainer.sif", None, "docker.io", mock_rich_progress + ) + + with pytest.raises(ContainerError.ImageNotFound): + download_obj.singularity_pull_image( + "a-container", f"{tmp_dir}/acontainer.sif", None, "ghcr.io", mock_rich_progress + ) + + # Traffic from Github Actions to GitHub's Container Registry is unlimited, so no harm should be done here. + with pytest.raises(ContainerError.InvalidTag): + download_obj.singularity_pull_image( + "ewels/multiqc:go-rewrite", + f"{tmp_dir}/umi-transfer.sif", + None, + "ghcr.io", + mock_rich_progress, + ) + + @pytest.mark.skipif( + shutil.which("singularity") is None, + reason="Can't test what Singularity does if it's not installed.", + ) + @with_temporary_folder + @mock.patch("rich.progress.Progress.add_task") + def test_singularity_pull_image_successfully(self, tmp_dir, mock_rich_progress): + download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_dir) + download_obj.singularity_pull_image( + "hello-world", f"{tmp_dir}/yet-another-hello-world.sif", None, "docker.io", mock_rich_progress + ) + + # + # Tests for 'get_singularity_images' + # + @pytest.mark.skipif( + shutil.which("singularity") is None, + reason="Can't test what Singularity does if it's not installed.", + ) + @with_temporary_folder + @mock.patch("nf_core.utils.fetch_wf_config") + def test_get_singularity_images(self, tmp_path, mock_fetch_wf_config): + download_obj = DownloadWorkflow( + 
pipeline="dummy", + outdir=tmp_path, + container_library=("mirage-the-imaginative-registry.io", "quay.io", "ghcr.io", "docker.io"), + ) + mock_fetch_wf_config.return_value = { + "process.mapping.container": "helloworld", + "process.mapping.container": "helloworld", + "process.mapping.container": "helloooooooworld", + "process.mapping.container": "ewels/multiqc:gorewrite", + } + download_obj.find_container_images("workflow") + assert len(download_obj.container_library) == 4 + # This list of fake container images should produce all kinds of ContainerErrors. + # Test that they are all caught inside get_singularity_images(). + download_obj.get_singularity_images() + + # If Singularity is not installed, it raises a OSError because the singularity command can't be found. @pytest.mark.skipif( shutil.which("singularity") is not None, - reason="Can't test how the code behaves when sungularity is not installed if it is.", + reason="Can't test how the code behaves when singularity is not installed if it is.", ) @with_temporary_folder @mock.patch("rich.progress.Progress.add_task") def test_singularity_pull_image_singularity_not_installed(self, tmp_dir, mock_rich_progress): download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_dir) - with pytest.raises(FileNotFoundError): - download_obj.singularity_pull_image("a-container", tmp_dir, None, mock_rich_progress) + with pytest.raises(OSError): + download_obj.singularity_pull_image( + "a-container", f"{tmp_dir}/anothercontainer.sif", None, "quay.io", mock_rich_progress + ) + + # + # Test for '--singularity-cache remote --singularity-cache-index'. Provide a list of containers already available in a remote location. 
+ # + @with_temporary_folder + def test_remote_container_functionality(self, tmp_dir): + os.environ["NXF_SINGULARITY_CACHEDIR"] = "foo" + + download_obj = DownloadWorkflow( + pipeline="nf-core/rnaseq", + outdir=os.path.join(tmp_dir, "new"), + revision="3.9", + compress_type="none", + container_cache_index=Path(__file__).resolve().parent / "data/testdata_remote_containers.txt", + ) + + download_obj.include_configs = False # suppress prompt, because stderr.is_interactive doesn't. + + # test if the settings are changed to mandatory defaults, if an external cache index is used. + assert download_obj.container_cache_utilisation == "remote" and download_obj.container_system == "singularity" + assert isinstance(download_obj.containers_remote, list) and len(download_obj.containers_remote) == 0 + # read in the file + download_obj.read_remote_containers() + assert len(download_obj.containers_remote) == 33 + assert "depot.galaxyproject.org-singularity-salmon-1.5.2--h84f40af_0.img" in download_obj.containers_remote + assert "MV Rena" not in download_obj.containers_remote # decoy in test file # # Tests for the main entry method 'download_workflow' @@ -177,9 +331,77 @@ def test_download_workflow_with_success(self, tmp_dir, mock_download_image, mock download_obj = DownloadWorkflow( pipeline="nf-core/methylseq", outdir=os.path.join(tmp_dir, "new"), - container="singularity", + container_system="singularity", revision="1.6", compress_type="none", + container_cache_utilisation="copy", ) + download_obj.include_configs = True # suppress prompt, because stderr.is_interactive doesn't. 
download_obj.download_workflow() + + # + # Test Download for Tower + # + @with_temporary_folder + def test_download_workflow_for_tower(self, tmp_dir): + download_obj = DownloadWorkflow( + pipeline="nf-core/rnaseq", + revision=("3.7", "3.9"), + compress_type="none", + tower=True, + container_system="singularity", + ) + + download_obj.include_configs = False # suppress prompt, because stderr.is_interactive doesn't. + + assert isinstance(download_obj.revision, list) and len(download_obj.revision) == 2 + assert isinstance(download_obj.wf_sha, dict) and len(download_obj.wf_sha) == 0 + assert isinstance(download_obj.wf_download_url, dict) and len(download_obj.wf_download_url) == 0 + + wfs = nf_core.list.Workflows() + wfs.get_remote_workflows() + ( + download_obj.pipeline, + download_obj.wf_revisions, + download_obj.wf_branches, + ) = nf_core.utils.get_repo_releases_branches(download_obj.pipeline, wfs) + + download_obj.get_revision_hash() + + # download_obj.wf_download_url is not set for tower downloads, but the sha values are + assert isinstance(download_obj.wf_sha, dict) and len(download_obj.wf_sha) == 2 + assert isinstance(download_obj.wf_download_url, dict) and len(download_obj.wf_download_url) == 0 + + # The outdir for multiple revisions is the pipeline name and date: e.g. nf-core-rnaseq_2023-04-27_18-54 + assert bool(re.search(r"nf-core-rnaseq_\d{4}-\d{2}-\d{1,2}_\d{1,2}-\d{1,2}", download_obj.outdir, re.S)) + + download_obj.output_filename = f"{download_obj.outdir}.git" + download_obj.download_workflow_tower(location=tmp_dir) + + assert download_obj.workflow_repo + assert isinstance(download_obj.workflow_repo, WorkflowRepo) + assert issubclass(type(download_obj.workflow_repo), SyncedRepo) + + # corroborate that the other revisions are inaccessible to the user. 
+ all_tags = {tag.name for tag in download_obj.workflow_repo.tags} + all_heads = {head.name for head in download_obj.workflow_repo.heads} + + assert set(download_obj.revision) == all_tags + # assert that the download has a "latest" branch. + assert "latest" in all_heads + + # download_obj.download_workflow_tower(location=tmp_dir) will run container image detection for all requested revisions + assert isinstance(download_obj.containers, list) and len(download_obj.containers) == 33 + # manually test container image detection for 3.7 revision only + download_obj.containers = [] # empty container list for the test + download_obj.workflow_repo.checkout(download_obj.wf_sha["3.7"]) + download_obj.find_container_images(download_obj.workflow_repo.access()) + assert len(download_obj.containers) == 30 # 30 containers for 3.7 + assert ( + "https://depot.galaxyproject.org/singularity/bbmap:38.93--he522d1c_0" in download_obj.containers + ) # direct definition + assert ( + "https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:59cdd445419f14abac76b31dd0d71217994cbcc9-0" + in download_obj.containers + ) # indirect definition via $container variable.