Added test_nothing nf-test and updated CI #620

Open · wants to merge 14 commits into dev
Changes from all commits
10 changes: 10 additions & 0 deletions .github/include.yaml
@@ -0,0 +1,10 @@
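# Scope map consumed by the change-detection action in ci.yml below:
# changes matching the "." globs re-run every nf-test, while changes
# matching the "tests" globs re-run only the pipeline-level tests under tests/.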
".":
- ./.github/workflows/**
- ./nf-test.config
- ./nextflow.config
tests:
- ./assets/*
- ./bin/*
- ./conf/*
- ./main.nf
- ./nextflow_schema.json
117 changes: 117 additions & 0 deletions .github/workflows/ci.yml
@@ -10,6 +10,10 @@ on:

env:
NXF_ANSI_LOG: false
NFT_VER: "0.8.4"
NFT_WORKDIR: "~"
NFT_DIFF: "pdiff"
NFT_DIFF_ARGS: "--line-numbers --expand-tabs=2"

concurrency:
group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}"
@@ -115,3 +119,116 @@ jobs:
- name: Run pipeline with ${{ matrix.profile }} test profile
run: |
nextflow run ${GITHUB_WORKSPACE} -profile test,docker --outdir ./results --binqc_tool checkm --checkm_db databases/checkm

# Adam and Carson's method for running only necessary nf-tests
changes:
name: Check for changes
runs-on: ubuntu-latest
outputs:
nf_test_files: ${{ steps.list.outputs.components }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0

- name: List nf-test files
id: list
uses: adamrtalbot/detect-nf-test-changes@v0.0.3
with:
head: ${{ github.sha }}
base: origin/${{ github.base_ref }}
include: .github/include.yaml

- name: Print list of nf-test files
run: |
echo ${{ steps.list.outputs.components }}

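# `components` is a JSON list of nf-test files to run, derived from the
# scopes in .github/include.yaml, e.g. ["tests/test.nf.test"] (hypothetical
# example value); the nf-test job matrix below spawns one job per entry.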
nf-test:
name: ${{ matrix.nf_test_files }} ${{ matrix.profile }} NF-${{ matrix.NXF_VER }}
needs: [changes]
if: needs.changes.outputs.nf_test_files != '[]'
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
NXF_VER:
- "latest-everything"
- "23.04"
nf_test_files: ["${{ fromJson(needs.changes.outputs.nf_test_files) }}"]
profile:
- "docker"

steps:
- name: Check out pipeline code
uses: actions/checkout@v4

- name: Install Nextflow
uses: nf-core/setup-nextflow@v2
with:
version: "${{ matrix.NXF_VER }}"

- uses: actions/setup-python@v4
with:
python-version: "3.11"
architecture: "x64"

- name: Install pdiff to see diff between nf-test snapshots
run: |
python -m pip install --upgrade pip
pip install pdiff

- name: Cache nf-test installation
id: cache-software
uses: actions/cache@v3
with:
path: |
/usr/local/bin/nf-test
/home/runner/.nf-test/nf-test.jar
key: ${{ runner.os }}-${{ env.NFT_VER }}-nftest

- name: Install nf-test
if: steps.cache-software.outputs.cache-hit != 'true'
run: |
wget -qO- https://code.askimed.com/install/nf-test | bash
sudo mv nf-test /usr/local/bin/

- name: Run nf-test
run: |
nf-test test --verbose ${{ matrix.nf_test_files }} --profile "+${{ matrix.profile }}" --junitxml=test.xml --tap=test.tap

- uses: pcolby/tap-summary@v1
with:
path: >-
test.tap

- name: Output log on failure
if: failure()
run: |
sudo apt install -y bat > /dev/null
batcat --decorations=always --color=always ${{ github.workspace }}/.nf-test/tests/*/meta/nextflow.log

- name: Publish Test Report
uses: mikepenz/action-junit-report@v3
if: always() # always run even if the previous step fails
with:
report_paths: test.xml

confirm-pass:
runs-on: ubuntu-latest
needs:
- changes
- nf-test
if: always()
steps:
- name: All tests ok
if: ${{ !contains(needs.*.result, 'failure') }}
run: exit 0
- name: One or more tests failed
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1

- name: debug-print
if: always()
run: |
echo "toJSON(needs) = ${{ toJSON(needs) }}"
echo "toJSON(needs.*.result) = ${{ toJSON(needs.*.result) }}"
1 change: 1 addition & 0 deletions .gitignore
@@ -6,3 +6,4 @@ results/
testing/
testing*
*.pyc
.nf-test*
1 change: 1 addition & 0 deletions bin/combine_tables.py
@@ -188,6 +188,7 @@ def main(args=None):
how="outer",
)

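# Sort rows by bin so the output TSV has a deterministic order
# (keeps nf-test snapshots stable across runs)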
results.sort_values("bin", inplace=True)
results.to_csv(args.out, sep="\t")


1 change: 1 addition & 0 deletions bin/get_mag_depths_summary.py
@@ -37,6 +37,7 @@ def main(args=None):
assembly_results = pd.read_csv(assembly_depths_file, index_col="bin", sep="\t")
results = results.append(assembly_results, sort=True, verify_integrity=True)

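# Sort for a reproducible row order in the depths summary TSV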
results.sort_values("bin", inplace=True)
results.to_csv(args.out, sep="\t")


1 change: 1 addition & 0 deletions bin/summary_busco.py
@@ -219,6 +219,7 @@ def main(args=None):
else:
df_final = df_specific.append(df_failed)

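# Sort by genome bin so snapshot comparisons see a stable row order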
df_final.sort_values(by="GenomeBin", inplace=True)
df_final.to_csv(args.out, sep="\t", index=False)


6 changes: 4 additions & 2 deletions conf/test.config
@@ -15,15 +15,17 @@ params {
config_profile_description = 'Minimal test dataset to check pipeline function'

// Limit resources so that this can run on GitHub Actions
- max_cpus = 2
- max_memory = '6.GB'
+ max_cpus = 4
+ max_memory = '15.GB'
max_time = '6.h'

// Input data
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.multirun.csv'

centrifuge_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_cf.tar.gz'
kraken2_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_kraken.tgz'
skip_krona = false
megahit_fix_cpu_1 = true
min_length_unbinned_contigs = 1
max_unbinned_contigs = 2
busco_db = "https://busco-data.ezlab.org/v5/data/lineages/bacteria_odb10.2024-01-08.tar.gz"
4 changes: 2 additions & 2 deletions conf/test_nothing.config
@@ -16,8 +16,8 @@ params {
config_profile_description = 'Minimal test dataset to check pipeline function'

// Limit resources so that this can run on GitHub Actions
- max_cpus = 2
- max_memory = '6.GB'
+ max_cpus = 4
+ max_memory = '12.GB'
max_time = '6.h'

// Input data
34 changes: 34 additions & 0 deletions nf-core-hyak.config
@@ -0,0 +1,34 @@
params {
config_profile_description = 'UW Hyak Pedslabs cluster profile provided by nf-core/configs.'
config_profile_contact = 'Carson J. Miller (@CarsonJM)'
config_profile_url = 'https://www.peds.uw.edu/'
max_memory = 742.GB
max_cpus = 40
max_time = 72.h
}

process {
executor = 'slurm'
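// Escalate on retry: attempts 1-2 run on the ckpt queue, attempt 3 on
// cpu-g2-mem2x, and attempts 4+ on compute-hugemem; attempts 1-3 bill to
// the stf account, later attempts to pedslabs.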
clusterOptions = { task.attempt < 4 ? "-A stf" : "-A pedslabs" }

withName:'.*' {
queue = { task.attempt < 4 ? ( task.attempt < 3 ? 'ckpt' : 'cpu-g2-mem2x' ) : 'compute-hugemem' }
errorStrategy = { task.attempt < 5 ? 'retry' : 'ignore' }
maxRetries = 5
maxErrors = '-1'
}
}

executor {
queueSize = 50
submitRateLimit = '1 sec'
}

singularity {
enabled = true
autoMounts = true
}

profiles {
    debug {
        cleanup = false
    }
}
10 changes: 10 additions & 0 deletions nf-test.config
@@ -0,0 +1,10 @@
config {
// Location of nf-tests
testsDir "."

// nf-test directory used to create temporary files for each test
workDir System.getenv("NFT_WORKDIR") ?: ".nf-test"

// Location of an optional nextflow.config file specific for executing pipeline tests
configFile "tests/nextflow.config"
}
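With this file at the repository root, a CI shard can be reproduced locally. A minimal sketch, assuming nf-test ~0.8.x, Docker, and pdiff are already installed (the NFT_* values mirror the env block in ci.yml above):

    # Mirror the CI environment, then run the pipeline-level test
    export NFT_WORKDIR=~
    export NFT_DIFF=pdiff
    export NFT_DIFF_ARGS="--line-numbers --expand-tabs=2"
    nf-test test --verbose tests/test.nf.test --profile "+docker"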
64 changes: 64 additions & 0 deletions tests/nextflow.config
@@ -0,0 +1,64 @@
params {
// Base directory for nf-core/modules test data
modules_testdata_base_path = 's3://ngi-igenomes/testdata/nf-core/modules/'

// Base directory for nf-core/fetchngs test data
pipelines_testdata_base_path = 's3://ngi-igenomes/testdata/nf-core/pipelines/fetchngs/1.15.0/'

max_cpus = 4
max_memory = '12.GB'
max_time = '2.h'
}

// Impose sensible resource limits for testing
process {
withName: BOWTIE2_PHIX_REMOVAL_ALIGN {
cpus = 1
memory = 12.GB
time = 2.h
}

withName: CENTRIFUGE_CENTRIFUGE {
cpus = 1
memory = 12.GB
time = 2.h
}

withName: BOWTIE2_ASSEMBLY_ALIGN {
cpus = 1
memory = 12.GB
time = 2.h
}

withName: METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS {
cpus = 1
memory = 12.GB
time = 2.h
}

withName: QUAST_BINS {
cpus = 1
memory = 12.GB
time = 2.h
}

withName: METABAT2_METABAT2 {
cpus = 1
memory = 12.GB
time = 2.h
}
}

// Impose same minimum Nextflow version as the pipeline for testing
manifest {
nextflowVersion = '!>=23.04.0'
}

// Disable Nextflow reporting options for tests (trace is kept enabled)
timeline { enabled = false }
report { enabled = false }
trace { enabled = true }
dag { enabled = false }

// Load modules test_data.config
includeConfig 'https://github.com/nf-core/modules/raw/master/tests/config/test_data.config'
56 changes: 56 additions & 0 deletions tests/test.nf.test
@@ -0,0 +1,56 @@
nextflow_pipeline {

name "Test pipeline"
script "../main.nf"
profile "test"

test("Profile: test") {

when {
params {
outdir = "$outputDir"
}
}

then {
def output_files = []
new File("$outputDir").eachFileRecurse { file -> output_files << file }

assertAll(
{ assert workflow.success },
{ assert snapshot(
output_files.findAll {
// fastp *.json
// bowtie2 *.log
it.toString() ==~ /.*QC_shortreads.*(json|log)/ ||
// Centrifuge *kreport.txt, *report.txt, *results.txt
// kraken2 *report.txt
it.toString() ==~ /.*Taxonomy.*txt/ ||
// MEGAHIT *.contigs.fa.gz
// metaSPAdes *contigs.fasta.gz, *scaffolds.fasta.gz
it.toString() ==~ /.*Assembly.*(fa\.gz|fasta\.gz)/ ||
// cannot add QUAST outputs because no sample-specific prefix for report files
// bowtie2 *.log
it.toString() ==~ /.*Assembly.*QC.*bowtie2\.log/ ||
// Prodigal *.gff.gz, *.faa.gz, *.fna.gz, *all.txt.gz
// Prokka *.gff, *.err, *.faa, *.ffn, *.fna, *.fsa, *.gbk, *.tbl, *.tsv, *.txt
it.toString() ==~ /.*Annotation.*(\.gz|err|faa|ffn|fna|fsa|gff|tbl|tsv|txt)/ ||
// MaxBin2 *.fa.gz
// MetaBAT2 *.fa.gz
it.toString() ==~ /.*GenomeBinning.*fa\.gz/ ||
// *depth.txt.gz
it.toString() ==~ /.*GenomeBinning.*depths.*txt\.gz/ ||
// bin_depths_summary.tsv, busco_summary.tsv, bin_summary.tsv
it.toString() ==~ /.*GenomeBinning.*busco_summary\.tsv/
// QUAST files have dates
}
).match()
},
{ assert new File("${outputDir}/GenomeBinning/depths/bins/bin_depths_summary.tsv").exists() },
{ assert new File("${outputDir}/GenomeBinning/bin_summary.tsv").exists() },
)
}
}
}
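When the filtered outputs change intentionally, the stored snapshot needs to be regenerated and committed. A hedged sketch using the standard nf-test CLI flag:

    # Re-run the test and overwrite the saved snapshot with the new results
    nf-test test tests/test.nf.test --profile "+docker" --update-snapshot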