Commit ae6518e

Merge pull request #180 from nf-core/lsp-formatting

Run lsp formatting

edmundmiller authored Dec 22, 2024
2 parents 7a4e356 + e6b3ecd commit ae6518e

Showing 39 changed files with 431 additions and 416 deletions.
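The changes below apply the Nextflow language server's formatter across the pipeline. The recurring edits are mechanical: bare `$var` interpolations become `${var}`, multi-line ternaries move the `?`/`:` operators to the start of the continuation lines, `else` drops to its own line, inline trailing comments move onto their own lines, and column-aligned assignments collapse to single spaces, so several deleted/added pairs below differ only in whitespace and look identical in this rendering. A minimal before/after sketch of the two most common patterns (identifiers illustrative):

```groovy
// Before: trailing-operator ternary, bare $-interpolation
tag "$meta.id"
container "${ cond ?
    'image-a' :
    'image-b' }"

// After: leading-operator ternary, braced interpolation
tag "${meta.id}"
container "${cond
    ? 'image-a'
    : 'image-b'}"
```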
20 changes: 10 additions & 10 deletions conf/test.config
@@ -24,30 +24,30 @@ params {

      // Input data
      // TODO params.pipelines_testdata_base_path + 'viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv'
-     input = "${projectDir}/assets/samplesheet.csv"
+     input = "${projectDir}/assets/samplesheet.csv"

      // Genome references
-     fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/nascent/reference/GRCh38_chr21.fa'
-     gtf = 'https://raw.githubusercontent.com/nf-core/test-datasets/nascent/reference/genes_chr21.gtf'
-     hisat2_index = 'https://raw.githubusercontent.com/nf-core/test-datasets/nascent/reference/GRCh38_chr21_hisat2.tar.gz'
+     fasta = 'https://raw.githubusercontent.com/nf-core/test-datasets/nascent/reference/GRCh38_chr21.fa'
+     gtf = 'https://raw.githubusercontent.com/nf-core/test-datasets/nascent/reference/genes_chr21.gtf'
+     hisat2_index = 'https://raw.githubusercontent.com/nf-core/test-datasets/nascent/reference/GRCh38_chr21_hisat2.tar.gz'

-     assay_type = "GROseq"
-     skip_grohmm = true // FIXME Fails due to higher memory requirements
+     assay_type = "GROseq"
+     // FIXME Fails due to higher memory requirements
+     skip_grohmm = true
      grohmm_min_uts = 5
      grohmm_max_uts = 10
      grohmm_min_ltprobb = -100
      grohmm_max_ltprobb = -150
-     filter_bed = "${projectDir}/tests/config/unwanted_region.bed"
-     intersect_bed = "${projectDir}/tests/config/wanted_region.bed"
+     filter_bed = "${projectDir}/tests/config/unwanted_region.bed"
+     intersect_bed = "${projectDir}/tests/config/wanted_region.bed"
  }

  process {
      withName: STAR_GENOMEGENERATE {
          ext.args = '--genomeSAindexNbases 9'
      }

-     withName: 'PINTS_CALLER' {
-         // HACK Tests fail after latest modules update
+     withName: PINTS_CALLER {
          ext.args = { "--disable-small" }
      }
  }
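To exercise this profile locally, the standard nf-core invocation applies (output directory is the user's choice):

```bash
nextflow run nf-core/nascent -profile test,docker --outdir results
```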
14 changes: 7 additions & 7 deletions conf/test_copro.config
@@ -15,17 +15,17 @@ params {
      config_profile_description = 'Test dataset to check PINTS pipeline function(https://pints.yulab.org/tre_calling#part-iv-case-2)'

      // Input data
-     input = "${projectDir}/tests/config/samplesheets/copro.csv"
+     input = "${projectDir}/tests/config/samplesheets/copro.csv"

-     genome = 'hg38'
-     assay_type = 'CoPRO'
-     filter_bed = "https://pints.yulab.org/ref/examples/promoters_1kb_tss_centered.bed.gz"
-     with_umi = true
-     umitools_dedup_stats = true
+     genome = 'hg38'
+     assay_type = 'CoPRO'
+     filter_bed = "https://pints.yulab.org/ref/examples/promoters_1kb_tss_centered.bed.gz"
+     with_umi = true
+     umitools_dedup_stats = true
  }

  process {
-     withName: NFCORE_NASCENT:NASCENT:FASTP {
+     withName: FASTP {
          ext.args = [
              "--adapter_sequence TGGAATTCTCGGGTGCCAAGGAACTCCAGTCAC",
              "--adapter_sequence_r2 GATCGTCGGACTGTAGAACTCTGAAC",
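The hunk above is truncated before the `ext.args` list closes. In nf-core configs a multi-element list like this is conventionally joined into a single flag string; a hedged sketch of the likely remainder (the `.join(' ').trim()` idiom is the common nf-core pattern, not confirmed by the visible diff):

```groovy
withName: FASTP {
    ext.args = [
        "--adapter_sequence TGGAATTCTCGGGTGCCAAGGAACTCCAGTCAC",
        "--adapter_sequence_r2 GATCGTCGGACTGTAGAACTCTGAAC",
    ].join(' ').trim()  // assumed closing, mirrors other nf-core configs
}
```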
6 changes: 3 additions & 3 deletions conf/test_full.config
@@ -16,9 +16,9 @@ params {

      // Input data for full size test
      // TODO params.pipelines_testdata_base_path + 'viralrecon/samplesheet/samplesheet_full_illumina_amplicon.csv'
-     input = "${projectDir}/assets/samplesheet_full.csv"
+     input = "${projectDir}/assets/samplesheet_full.csv"

      // Genome references
-     genome = 'hg38'
-     assay_type = 'GROseq'
+     genome = 'hg38'
+     assay_type = 'GROseq'
  }
19 changes: 10 additions & 9 deletions conf/test_grocap.config
@@ -15,21 +15,22 @@ params {
      config_profile_description = 'Test dataset to check PINTS pipeline function(https://pints.yulab.org/tre_calling#part-iii-case-1)'

      // Input data
-     input = "${projectDir}/tests/config/samplesheets/grocap.csv"
+     input = "${projectDir}/tests/config/samplesheets/grocap.csv"

-     genome = 'hg38'
-     assay_type = 'GROcap'
-     filter_bed = "https://pints.yulab.org/ref/examples/promoters_1kb_tss_centered.bed.gz"
+     genome = 'hg38'
+     assay_type = 'GROcap'
+     filter_bed = "https://pints.yulab.org/ref/examples/promoters_1kb_tss_centered.bed.gz"
  }

  process {
-     withName: NFCORE_NASCENT:NASCENT:FASTP {
+     // only keep reads longer than 14nts after trimming
+     // This library was polyadenylated,
+     // so we are trimming the last 20nts per reads (with --trim_tail1).
+     // For more recent single-end PRO/GRO-cap libraries, this may not be necessary.
+     withName: 'NFCORE_NASCENT:NASCENT:FASTP' {
          ext.args = [
              "--adapter_sequence TGGAATTCTCGGGTGCCAAGG",
-             "-l 14", // only keep reads longer than 14nts after trimming
-             // This library was polyadenylated,
-             // so we are trimming the last 20nts per reads (with --trim_tail1).
-             // For more recent single-end PRO/GRO-cap libraries, this may not be necessary.
+             "-l 14",
              "--trim_tail1 20",
              "--low_complexity_filter",
              "-w 8"
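For reference, the fastp flags collected above correspond to this standalone call: `-l 14` keeps only reads of at least 14 nt after trimming, `--trim_tail1 20` removes the final 20 nt of read 1 (the poly-A tail), `--low_complexity_filter` drops low-complexity reads, and `-w 8` runs 8 worker threads (filenames illustrative):

```bash
# Standalone equivalent of the ext.args above (input/output names illustrative)
fastp \
    --adapter_sequence TGGAATTCTCGGGTGCCAAGG \
    -l 14 --trim_tail1 20 --low_complexity_filter -w 8 \
    -i sample.fastq.gz -o sample.trimmed.fastq.gz
```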
10 changes: 5 additions & 5 deletions modules/local/bed2saf.nf
@@ -1,11 +1,11 @@
  process BED2SAF {
-     tag "$meta.id"
+     tag "${meta.id}"
      label 'process_single'

      conda "conda-forge::gawk=5.1.0"
-     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-         'https://depot.galaxyproject.org/singularity/ubuntu:20.04' :
-         'nf-core/ubuntu:20.04' }"
+     container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container
+         ? 'https://depot.galaxyproject.org/singularity/ubuntu:20.04'
+         : 'nf-core/ubuntu:20.04'}"

      input:
      tuple val(meta), path(bed)

@@ -20,7 +20,7 @@ process BED2SAF {
      script:
      """
      awk 'OFS="\\t" {print \$1"."\$2"."\$3, \$1, \$2, \$3, "."}' \\
-         $bed \\
+         ${bed} \\
          > ${bed.baseName}.saf
      cat <<-END_VERSIONS > versions.yml
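The awk one-liner rewrites each BED interval into featureCounts' SAF layout (GeneID, Chr, Start, End, Strand), synthesizing a GeneID from `chrom.start.end` and filling the strand column with `.`. A worked example of the transformation (coordinates illustrative):

```bash
# input.bed:   chr21  5011000  5012000
awk 'OFS="\t" {print $1"."$2"."$3, $1, $2, $3, "."}' input.bed > input.saf
# input.saf:   chr21.5011000.5012000  chr21  5011000  5012000  .
```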
20 changes: 11 additions & 9 deletions modules/local/dreg_prep/main.nf
@@ -1,16 +1,16 @@
  process DREG_PREP {

-     tag "$meta.id"
+     tag "${meta.id}"
      label 'process_low'

      conda "${moduleDir}/environment.yml"
-     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-         'https://depot.galaxyproject.org/singularity/mulled-v2-f01e242bdea19948f0576fdca94777242fe4c2cb:4238fb992d2a93e648108c86e3a9f51348e834a9-0' :
-         'biocontainers/mulled-v2-f01e242bdea19948f0576fdca94777242fe4c2cb:4238fb992d2a93e648108c86e3a9f51348e834a9-0' }"
+     container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container
+         ? 'https://depot.galaxyproject.org/singularity/mulled-v2-f01e242bdea19948f0576fdca94777242fe4c2cb:4238fb992d2a93e648108c86e3a9f51348e834a9-0'
+         : 'biocontainers/mulled-v2-f01e242bdea19948f0576fdca94777242fe4c2cb:4238fb992d2a93e648108c86e3a9f51348e834a9-0'}"

      input:
      tuple val(meta), path(bam_file), val(index)
-     path sizes
+     path sizes
      val assay_type

      output:

@@ -78,10 +78,11 @@ process DREG_PREP {
          echo "bedGraph to bigwig done"
          """
-     } else {
+     }
+     else {
          if (forwardStranded) {
              """
-             samtools view -@ $task.cpus -bf 0x2 ${bam_file} | samtools sort -n -@ $task.cpus \\
+             samtools view -@ ${task.cpus} -bf 0x2 ${bam_file} | samtools sort -n -@ ${task.cpus} \\
                  > ${prefix}.dreg.bam
              bedtools bamtobed -bedpe -mate1 -i ${prefix}.dreg.bam \\

@@ -118,9 +119,10 @@ process DREG_PREP {
              ${prefix}.unsorted.bedGraph \\
              > ${prefix}.bedGraph
          """
-     } else {
+     }
+     else {
          """
-         samtools view -@ $task.cpus -bf 0x2 ${bam_file} | samtools sort -n -@ $task.cpus \\
+         samtools view -@ ${task.cpus} -bf 0x2 ${bam_file} | samtools sort -n -@ ${task.cpus} \\
              > ${prefix}.dreg.bam
          bedtools bamtobed -bedpe -mate1 -i ${prefix}.dreg.bam \\
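The repeated shell idiom in this module keeps only properly paired reads and name-sorts them so mates sit on adjacent lines, which `bedtools bamtobed -bedpe -mate1` requires. A standalone sketch of the same pipeline (filenames illustrative):

```bash
# -f 0x2 keeps reads mapped in a proper pair; -b emits BAM; -@ sets threads.
# samtools sort -n sorts by read name so mate pairs are adjacent.
samtools view -@ 4 -bf 0x2 input.bam | samtools sort -n -@ 4 > name_sorted.bam
# -bedpe emits one line per pair; -mate1 reports mate 1 coordinates first.
bedtools bamtobed -bedpe -mate1 -i name_sorted.bam > fragments.bedpe
```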
20 changes: 10 additions & 10 deletions modules/local/grohmm/parametertuning/main.nf
@@ -1,12 +1,12 @@
  process GROHMM_PARAMETERTUNING {
-     tag "$meta.id|$UTS|$LtProbB"
+     tag "${meta.id}|${UTS}|${LtProbB}"
      label 'process_high'
      // array 10

      conda "${moduleDir}/environment.yml"
-     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-         'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/b9/b929af5662486ba6ce2d27eb501e5c7ec71ca7dd8e333fe5d3dcf2803d87cf67/data' :
-         'community.wave.seqera.io/library/grohmm:833aa94cad4202ac' }"
+     container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container
+         ? 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/b9/b929af5662486ba6ce2d27eb501e5c7ec71ca7dd8e333fe5d3dcf2803d87cf67/data'
+         : 'community.wave.seqera.io/library/grohmm:833aa94cad4202ac'}"

      input:
      tuple val(meta), path(bams), path(bais), val(UTS), val(LtProbB)

@@ -15,7 +15,7 @@ process GROHMM_PARAMETERTUNING {
      output:
      tuple val(meta), path("*.tuning.csv"), emit: tuning
      tuple val(meta), path("*.tuning.consensus.bed"), emit: bed
-     path "versions.yml", emit: versions
+     path "versions.yml", emit: versions

      when:
      task.ext.when == null || task.ext.when

@@ -27,12 +27,12 @@ process GROHMM_PARAMETERTUNING {
      grohmm_parametertuning.R \\
          --bam_file ${bams} \\
          --outprefix ${prefix} \\
-         --gxf $gxf \\
-         --uts $UTS \\
-         --ltprobb $LtProbB \\
+         --gxf ${gxf} \\
+         --uts ${UTS} \\
+         --ltprobb ${LtProbB} \\
          --outdir ./ \\
-         --cores $task.cpus \\
-         $args
+         --cores ${task.cpus} \\
+         ${args}
      cat <<-END_VERSIONS > versions.yml
      "${task.process}":
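Each task is tagged `meta.id|UTS|LtProbB`, that is, one task per point on the tuning grid spanned by the `grohmm_min_uts`/`grohmm_max_uts` and `grohmm_min_ltprobb`/`grohmm_max_ltprobb` bounds seen in conf/test.config. A hedged sketch of how such a grid could be expanded into this module's input tuples (channel names and the LtProbB step size are illustrative, not taken from the pipeline source):

```groovy
// Expand the tuning bounds into [ meta, bams, bais, UTS, LtProbB ] tuples.
ch_uts = Channel.of(5..10)          // grohmm_min_uts .. grohmm_max_uts
ch_ltprobb = Channel.of(-100, -150) // step between the bounds is an assumption
ch_grid = ch_bams_bais.combine(ch_uts).combine(ch_ltprobb)
GROHMM_PARAMETERTUNING(ch_grid, ch_gtf)
```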
24 changes: 12 additions & 12 deletions modules/local/grohmm/transcriptcalling/main.nf
@@ -1,25 +1,25 @@
  process GROHMM_TRANSCRIPTCALLING {
-     tag "$meta.id"
+     tag "${meta.id}"
      label 'process_high'
      label 'process_long'

      conda "${moduleDir}/environment.yml"
-     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-         'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/b9/b929af5662486ba6ce2d27eb501e5c7ec71ca7dd8e333fe5d3dcf2803d87cf67/data' :
-         'community.wave.seqera.io/library/grohmm:833aa94cad4202ac' }"
+     container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container
+         ? 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/b9/b929af5662486ba6ce2d27eb501e5c7ec71ca7dd8e333fe5d3dcf2803d87cf67/data'
+         : 'community.wave.seqera.io/library/grohmm:833aa94cad4202ac'}"

      input:
      tuple val(meta), path(bams), path(bais), path(tuning_file)
      path gxf

      output:
      tuple val(meta), path("*.transcripts.txt"), emit: transcripts
-     tuple val(meta), path("*.eval.txt") , emit: eval
+     tuple val(meta), path("*.eval.txt"), emit: eval
      tuple val(meta), path("*.transcripts.bed"), emit: transcripts_bed
-     tuple val(meta), path("*.tdFinal.txt") , emit: td
-     tuple val(meta), path("*.tdplot_mqc.png") , emit: td_plot
-     tuple val(meta), path("*.tdFinal_mqc.csv") , emit: mqc_csv
-     path "versions.yml" , emit: versions
+     tuple val(meta), path("*.tdFinal.txt"), emit: td
+     tuple val(meta), path("*.tdplot_mqc.png"), emit: td_plot
+     tuple val(meta), path("*.tdFinal_mqc.csv"), emit: mqc_csv
+     path "versions.yml", emit: versions

      when:
      task.ext.when == null || task.ext.when

@@ -32,11 +32,11 @@ process GROHMM_TRANSCRIPTCALLING {
          --bam_file ${bams} \\
          --tuning_file ${tuning_file} \\
          --outprefix ${prefix} \\
-         --gxf $gxf \\
+         --gxf ${gxf} \\
          --outdir ./ \\
-         --cores $task.cpus \\
+         --cores ${task.cpus} \\
          --memory ${task.memory.toMega()} \\
-         $args
+         ${args}
      cat <<-END_VERSIONS > versions.yml
      "${task.process}":
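This module consumes the tuning CSV produced by GROHMM_PARAMETERTUNING alongside the same BAMs. A hedged wiring sketch joining the two by their shared `meta` key (channel names illustrative, not from the pipeline source):

```groovy
// join() pairs channel items whose first element (meta) matches,
// yielding [ meta, bams, bais, tuning_file ] for the input: block above.
ch_tc_input = ch_bams_bais.join(GROHMM_PARAMETERTUNING.out.tuning)
GROHMM_TRANSCRIPTCALLING(ch_tc_input, ch_gtf)
// downstream consumers read the named emits, e.g.
// GROHMM_TRANSCRIPTCALLING.out.transcripts_bed
```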
15 changes: 8 additions & 7 deletions modules/local/gtf2bed.nf
@@ -1,26 +1,27 @@
  process GTF2BED {
-     tag "$gtf"
+     tag "${gtf}"
      label 'process_low'

      conda "conda-forge::perl=5.26.2"
-     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-         'https://depot.galaxyproject.org/singularity/perl:5.26.2' :
-         'biocontainers/perl:5.26.2' }"
+     container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container
+         ? 'https://depot.galaxyproject.org/singularity/perl:5.26.2'
+         : 'biocontainers/perl:5.26.2'}"

      input:
      path gtf

      output:
-     path '*.bed' , emit: bed
+     path '*.bed', emit: bed
      path "versions.yml", emit: versions

      when:
      task.ext.when == null || task.ext.when

-     script: // This script is bundled with the pipeline, in nf-core/nascent/bin/
+     script:
+     // This script is bundled with the pipeline, in nf-core/nascent/bin/
      """
      gtf2bed \\
-         $gtf \\
+         ${gtf} \\
          > ${gtf.baseName}.bed
      cat <<-END_VERSIONS > versions.yml
28 changes: 13 additions & 15 deletions subworkflows/local/align_bwamem2/main.nf
@@ -2,15 +2,15 @@
  //
  // Alignment with BWAMEM2
  //

- include { BWAMEM2_MEM } from '../../../modules/nf-core/bwamem2/mem/main'
+ include { BWAMEM2_MEM } from '../../../modules/nf-core/bwamem2/mem/main'
  include { BAM_SORT_STATS_SAMTOOLS } from '../../nf-core/bam_sort_stats_samtools/main'

  workflow ALIGN_BWAMEM2 {
      take:
-     ch_reads // channel (mandatory): [ val(meta), [ path(reads) ] ]
-     ch_index // channel (mandatory): [ val(meta2), path(index) ]
-     val_sort_bam // boolean (mandatory): true or false
-     ch_fasta // channel (optional) : [ val(meta3), path(fasta) ]
+     ch_reads // channel (mandatory): [ val(meta), [ path(reads) ] ]
+     ch_index // channel (mandatory): [ val(meta2), path(index) ]
+     val_sort_bam // boolean (mandatory): true or false
+     ch_fasta // channel (optional) : [ val(meta3), path(fasta) ]

      main:
      ch_versions = Channel.empty()

@@ -19,25 +19,23 @@ workflow ALIGN_BWAMEM2 {
      //
      // Map reads with BWA
      //

-     BWAMEM2_MEM ( ch_reads, ch_index, ch_fasta, val_sort_bam )
+     BWAMEM2_MEM(ch_reads, ch_index, ch_fasta, val_sort_bam)
      ch_versions = ch_versions.mix(BWAMEM2_MEM.out.versions.first())

      //
      // Sort, index BAM file and run samtools stats, flagstat and idxstats
      //

-     BAM_SORT_STATS_SAMTOOLS ( BWAMEM2_MEM.out.bam, ch_fasta )
+     BAM_SORT_STATS_SAMTOOLS(BWAMEM2_MEM.out.bam, ch_fasta)
      ch_versions = ch_versions.mix(BAM_SORT_STATS_SAMTOOLS.out.versions)

      emit:
-     bam_orig = BWAMEM2_MEM.out.bam // channel: [ val(meta), path(bam) ]
-
-     bam = BAM_SORT_STATS_SAMTOOLS.out.bam // channel: [ val(meta), path(bam) ]
-     bai = BAM_SORT_STATS_SAMTOOLS.out.bai // channel: [ val(meta), path(bai) ]
-     csi = BAM_SORT_STATS_SAMTOOLS.out.csi // channel: [ val(meta), path(csi) ]
-     stats = BAM_SORT_STATS_SAMTOOLS.out.stats // channel: [ val(meta), path(stats) ]
+     bam_orig = BWAMEM2_MEM.out.bam // channel: [ val(meta), path(bam) ]
+     bam = BAM_SORT_STATS_SAMTOOLS.out.bam // channel: [ val(meta), path(bam) ]
+     bai = BAM_SORT_STATS_SAMTOOLS.out.bai // channel: [ val(meta), path(bai) ]
+     csi = BAM_SORT_STATS_SAMTOOLS.out.csi // channel: [ val(meta), path(csi) ]
+     stats = BAM_SORT_STATS_SAMTOOLS.out.stats // channel: [ val(meta), path(stats) ]
      flagstat = BAM_SORT_STATS_SAMTOOLS.out.flagstat // channel: [ val(meta), path(flagstat) ]
      idxstats = BAM_SORT_STATS_SAMTOOLS.out.idxstats // channel: [ val(meta), path(idxstats) ]

-     versions = ch_versions // channel: [ path(versions.yml) ]
+     versions = ch_versions // channel: [ path(versions.yml) ]
  }
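For context, a hedged sketch of a call site matching the `take:` declarations above (channel names illustrative; the argument order follows the take block, with `val_sort_bam = true` requesting coordinate-sorted output):

```groovy
ALIGN_BWAMEM2(
    ch_fastq,     // [ val(meta), [ path(reads) ] ]
    ch_bwa_index, // [ val(meta2), path(index) ]
    true,         // val_sort_bam
    ch_fasta      // [ val(meta3), path(fasta) ]
)
ch_sorted_bam = ALIGN_BWAMEM2.out.bam
ch_versions = ch_versions.mix(ALIGN_BWAMEM2.out.versions)
```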