Merge pull request #153 from LouisLeNezet/fulltest
Update nf-tests and language server fixes
LouisLeNezet authored Nov 11, 2024
2 parents 06d6b60 + 3f52128 commit 10a1dda
Showing 60 changed files with 1,295 additions and 1,856 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -64,6 +64,7 @@ Initial release of nf-core/phaseimpute, created with the [nf-core](https://nf-co
- [#148](https://github.com/nf-core/phaseimpute/pull/148) - Fix awsfulltest github action for manual dispatch
- [#149](https://github.com/nf-core/phaseimpute/pull/149) - Remove the map file from the awsfulltest
- [#152](https://github.com/nf-core/phaseimpute/pull/152) - Fix URLs in the documentation, remove the tools citation from the README, and use a white background for all images in the documentation.
- [#153](https://github.com/nf-core/phaseimpute/pull/153) - Update and simplify the subworkflow snapshots and check only file names (no md5sum for BAM and VCF files, as their checksums change with timestamps).
- [#157](https://github.com/nf-core/phaseimpute/pull/157) - Add `chunk_model` as a parameter for better control over `GLIMPSE2_CHUNK` and set the window size in `GLIMPSE1_CHUNK` and `GLIMPSE2_CHUNK` to 4 Mb to reduce the number of chunks (empirical).

### `Fixed`
@@ -74,6 +75,7 @@ Initial release of nf-core/phaseimpute, created with the [nf-core](https://nf-co
- [#75](https://github.com/nf-core/phaseimpute/pull/75) - Set frequency computation with VCFFIXUP process as optional with `--compute_freq`. Use Glimpse_chunk on panel vcf to compute the chunk and not makewindows on fasta.
- [#117](https://github.com/nf-core/phaseimpute/pull/117) - Fix directories in CSV.
- [#151](https://github.com/nf-core/phaseimpute/pull/151) - Fix `Type not supported: class org.codehaus.groovy.runtime.GStringImpl` error due to `String` test in `getFileExtension()`.
- [#153](https://github.com/nf-core/phaseimpute/pull/153) - Fix the `getFileExtension()` function. Fix the image in `usage.md`. Fix small warnings and errors reported by the updated language server: `def` added where necessary, `:` used instead of `,` in assertions, `_` prefixed to variables not used in closures, `for` loops replaced by `.each{}`, and unused code / inputs removed.

### `Dependencies`

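The snapshot change described in the #153 entry above is visible in the subworkflow tests further down in this diff: rather than snapshotting entire output channels, whose BAM/VCF md5sums are unstable because the files embed timestamps, the assertions snapshot only stable values such as the version files and output file names. A minimal nf-test sketch of that pattern (channel and file names here are illustrative, not taken from a specific test):

```groovy
then {
    assertAll(
        { assert workflow.success },
        { assert snapshot(
            // versions.yml content is stable, so it can be snapshotted directly
            workflow.out.versions,
            // for BAM/VCF outputs, keep only the meta map and the file names, not md5sums
            workflow.out.bam_renamed.collect { meta, bam, bai ->
                [ meta, path(bam).getFileName().toString(), path(bai).getFileName().toString() ]
            }
        ).match() }
    )
}
```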
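The #157 entry above adds a `chunk_model` pipeline parameter. As a rough illustration only (the accepted values are defined by the GLIMPSE2 documentation and the pipeline schema, not by this diff), such a parameter would typically be set in a config file or on the command line:

```groovy
// Hypothetical usage sketch; check the pipeline schema for the actual allowed values.
params {
    chunk_model = 'recursive'   // assumed example value, passed through to GLIMPSE2_CHUNK
}
```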
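The language-server fixes listed in the #153 `Fixed` entry above follow a few recurring Groovy patterns. A small, self-contained sketch of what each fix looks like (hypothetical code, not taken from the pipeline):

```groovy
// Hypothetical Groovy snippet illustrating the listed fixes; not pipeline code.
def samples = [ [ [id: 'sample1'], 'a.bam' ], [ [id: 'sample2'], 'b.bam' ] ]

// `def` added to local variable declarations the language server flags as undeclared;
// `_` prefix marks the closure parameter that is not used
def ids = samples.collect { meta, _bam -> meta.id }

// `:` used instead of `,` to separate the assert condition from its message
assert ids.size() == 2 : "expected two samples"

// `for` loop replaced by `.each {}`
ids.each { id -> println id }
```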
3 changes: 1 addition & 2 deletions main.nf
@@ -126,8 +126,7 @@ workflow {
params.validate_params,
params.monochrome_logs,
args,
params.outdir,
params.input
params.outdir
)

//
12 changes: 6 additions & 6 deletions modules/local/add_columns/tests/main.nf.test.snap
@@ -43,7 +43,7 @@
]
],
"1": [
"versions.yml:md5,b25787d2ba80184a94de82cf5cfe8e7a"
"versions.yml:md5,ab0afe509bddeef28fcf8d00db1cec81"
],
"txt": [
[
@@ -58,15 +58,15 @@
]
],
"versions": [
"versions.yml:md5,b25787d2ba80184a94de82cf5cfe8e7a"
"versions.yml:md5,ab0afe509bddeef28fcf8d00db1cec81"
]
}
],
"meta": {
"nf-test": "0.8.4",
"nextflow": "24.04.3"
},
"timestamp": "2024-07-17T14:43:39.390342082"
"timestamp": "2024-11-06T13:32:32.608346737"
},
"Add columns to txt file with missing fields": {
"content": [
@@ -81,7 +81,7 @@
]
],
"1": [
"versions.yml:md5,b25787d2ba80184a94de82cf5cfe8e7a"
"versions.yml:md5,ab0afe509bddeef28fcf8d00db1cec81"
],
"txt": [
[
@@ -93,14 +93,14 @@
]
],
"versions": [
"versions.yml:md5,b25787d2ba80184a94de82cf5cfe8e7a"
"versions.yml:md5,ab0afe509bddeef28fcf8d00db1cec81"
]
}
],
"meta": {
"nf-test": "0.8.4",
"nextflow": "24.04.3"
},
"timestamp": "2024-07-17T14:44:39.317216249"
"timestamp": "2024-11-06T13:32:39.650698807"
}
}
1 change: 0 additions & 1 deletion modules/local/bam_chr_extract/main.nf
@@ -35,7 +35,6 @@ process BAM_CHR_EXTRACT {
"""

stub:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
"""
touch ${prefix}.txt
6 changes: 3 additions & 3 deletions modules/local/bam_chr_extract/tests/main.nf.test
@@ -15,15 +15,15 @@ nextflow_process {
input[0] = Channel.fromList([
[
[ id:'test_single_end_bam' ], // meta map
file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true)
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.single_end.bam', checkIfExists: true)
],
[
[id: 'test2_paired_end_sorted_bam'], // meta map
file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true)
file(params.modules_testdata_base_path + 'genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam', checkIfExists: true)
],
[
[id: 'test_paired_end_sorted_cram'], // meta map
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true)
file(params.modules_testdata_base_path + 'genomics/homo_sapiens/illumina/cram/test.paired_end.sorted.cram', checkIfExists: true)
]
])
"""
12 changes: 6 additions & 6 deletions modules/local/list_to_file/tests/main.nf.test.snap
@@ -7,8 +7,8 @@
{
"id": "all"
},
"all_id.txt:md5,6ffc4e703a69bb652dc666a8dcae1863",
"all_noid.txt:md5,045a6e673d95a6e8152a1d06959db93a"
"all.id.txt:md5,6ffc4e703a69bb652dc666a8dcae1863",
"all.noid.txt:md5,045a6e673d95a6e8152a1d06959db93a"
]
],
"1": [
@@ -19,8 +19,8 @@
{
"id": "all"
},
"all_id.txt:md5,6ffc4e703a69bb652dc666a8dcae1863",
"all_noid.txt:md5,045a6e673d95a6e8152a1d06959db93a"
"all.id.txt:md5,6ffc4e703a69bb652dc666a8dcae1863",
"all.noid.txt:md5,045a6e673d95a6e8152a1d06959db93a"
]
],
"versions": [
@@ -30,9 +30,9 @@
],
"meta": {
"nf-test": "0.8.4",
"nextflow": "24.04.4"
"nextflow": "24.04.3"
},
"timestamp": "2024-10-16T16:15:18.4568228"
"timestamp": "2024-11-06T13:55:57.105169612"
},
"Content_withid": {
"content": [
1 change: 0 additions & 1 deletion modules/local/vcf_chr_extract/main.nf
@@ -36,7 +36,6 @@ process VCF_CHR_EXTRACT {
"""

stub:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
"""
touch ${prefix}.txt
6 changes: 3 additions & 3 deletions modules/local/vcf_chr_extract/tests/main.nf.test
@@ -15,15 +15,15 @@ nextflow_process {
input[0] = Channel.fromList([
[
[id:'test_bcf'], // meta map
file(params.test_data['sarscov2']['illumina']['test_bcf'], checkIfExists: true)
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/vcf/test.bcf', checkIfExists: true)
],
[
[id: 'test2_vcf'], // meta map
file(params.test_data['sarscov2']['illumina']['test2_vcf'], checkIfExists: true)
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/vcf/test2.vcf', checkIfExists: true)
],
[
[id: 'test2_haplotc_vcf_gz'], // meta map
file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_vcf_gz'], checkIfExists: true)
file(params.modules_testdata_base_path + 'genomics/homo_sapiens/illumina/gatk/haplotypecaller_calls/test2_haplotc.vcf.gz', checkIfExists: true)
]
])
"""
1 change: 1 addition & 0 deletions subworkflows/local/bam_chr_rename_samtools/main.nf
@@ -13,6 +13,7 @@ workflow BAM_CHR_RENAME_SAMTOOLS {
SAMTOOLS_REHEADER(
ch_bam.map{
meta, bam, index, prefix ->
def cmd = ""
if (prefix == "nochr") {
cmd = 'sed -E "s/^(@SQ.*\\tSN:)chr/\\1/"'
} else if (prefix == "chr") {
48 changes: 30 additions & 18 deletions subworkflows/local/bam_chr_rename_samtools/tests/main.nf.test
@@ -24,14 +24,14 @@ nextflow_workflow {
input[0] = Channel.fromList([
[
[id: "test_paired"],
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExist:true),
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExist:true),
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExist:true),
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam.bai', checkIfExist:true),
"nochr"
],
[
[id: "test_MT"],
file(params.test_data['homo_sapiens']['illumina']['test_illumina_mt_bam'], checkIfExist:true),
file(params.test_data['homo_sapiens']['illumina']['test_illumina_mt_bam_bai'], checkIfExist:true),
file(params.modules_testdata_base_path + 'genomics/homo_sapiens/illumina/bam/test_illumina_mt.bam', checkIfExist:true),
file(params.modules_testdata_base_path + 'genomics/homo_sapiens/illumina/bam/test_illumina_mt.bam.bai', checkIfExist:true),
"nochr"
],
])
@@ -42,11 +42,17 @@
then {
assertAll(
{ assert workflow.success },
{ assert snapshot(workflow.out).match() },
{ assert snapshot(workflow.out.bam_renamed.collect{
bam(it[1]).getHeader().findAll { it.startsWith ("@SQ") }
}).match("headernochr")
}
{ assert snapshot(
workflow.out.versions,
workflow.out.bam_renamed.collect{[
it[0],
path(it[1]).getFileName().toString(),
path(it[2]).getFileName().toString()
] },
workflow.out.bam_renamed.collect{
bam(it[1]).getHeader().findAll { it.startsWith ("@SQ") }
}
).match() }
)
}
}
@@ -60,14 +66,14 @@
input[0] = Channel.fromList([
[
[id: "test_paired"],
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExist:true),
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExist:true),
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExist:true),
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam.bai', checkIfExist:true),
"nochr"
],
[
[id: "test_MT"],
file(params.test_data['homo_sapiens']['illumina']['test_illumina_mt_bam'], checkIfExist:true),
file(params.test_data['homo_sapiens']['illumina']['test_illumina_mt_bam_bai'], checkIfExist:true),
file(params.modules_testdata_base_path + 'genomics/homo_sapiens/illumina/bam/test_illumina_mt.bam', checkIfExist:true),
file(params.modules_testdata_base_path + 'genomics/homo_sapiens/illumina/bam/test_illumina_mt.bam.bai', checkIfExist:true),
"nochr"
],
])
@@ -87,11 +93,17 @@
then {
assertAll(
{ assert workflow.success },
{ assert snapshot(workflow.out).match() },
{ assert snapshot(workflow.out.bam_renamed.collect{
bam(it[1]).getHeader().findAll { it.startsWith ("@SQ") }
}).match("headerwithchr")
}
{ assert snapshot(
workflow.out.versions,
workflow.out.bam_renamed.collect{[
it[0],
path(it[1]).getFileName().toString(),
path(it[2]).getFileName().toString()
] },
workflow.out.bam_renamed.collect{
bam(it[1]).getHeader().findAll { it.startsWith ("@SQ") }
}
).match() }
)
}
}