Unverified commit bd62a1c4, authored by van den Berg, committed by GitHub

Merge branch 'develop' into Pacbio-variantcalling

Parents: 353224aa 3c6d6383
@@ -16,6 +16,9 @@ version 5.0.0-dev
 + isoseq3: Add required bam index input to isoseq3.
 + pbbam: Add task for indexing PacBio bam files
 + picard: Add CollectHsMetrics and CollectVariantCallingMetrics.
++ bcftools: add tmpDir input to specify temporary directory when sorting.
++ bcftools: remove outputType and implement indexing based on output file extension.
++ NanoPack: Add parameter_meta to NanoPlot task.
 + Centrifuge: Remove metrics file from classification (which causes the
   summary report to be empty).
   https://github.com/DaehwanKimLab/centrifuge/issues/83
...
@@ -35,7 +35,6 @@ task Annotate {
         Boolean keepSites = false
         String? markSites
         Boolean noVersion = false
-        String outputType = "z"
         String? regions
         File? regionsFile
         File? renameChrs
@@ -52,14 +51,14 @@ task Annotate {
         String dockerImage = "quay.io/biocontainers/bcftools:1.10.2--h4f4756c_2"
     }
-    Boolean indexing = if outputType == "z" then true else false
+    Boolean compressed = basename(outputPath) != basename(outputPath, ".gz")
     command {
         set -e
         mkdir -p "$(dirname ~{outputPath})"
         bcftools annotate \
         -o ~{outputPath} \
-        -O ~{outputType} \
+        -O ~{true="z" false="v" compressed} \
         ~{"--annotations " + annsFile} \
         ~{"--collapse " + collapse} \
         ~{true="--columns" false="" length(columns) > 0} ~{sep="," columns} \
@@ -80,7 +79,7 @@ task Annotate {
         ~{true="--remove" false="" length(removeAnns) > 0} ~{sep="," removeAnns} \
         ~{inputFile}
-        ~{if indexing then 'bcftools index --tbi ~{outputPath}' else ''}
+        ~{if compressed then 'bcftools index --tbi ~{outputPath}' else ''}
     }
@@ -97,7 +96,6 @@ task Annotate {
     parameter_meta {
         outputPath: {description: "The location the output VCF file should be written.", category: "common"}
-        outputType: {description: "Output type: v=vcf, z=vcf.gz, b=bcf, u=uncompressed bcf", category: "advanced"}
         annsFile: {description: "Bgzip-compressed and tabix-indexed file with annotations (see man page for details).", category: "advanced"}
         collapse: {description: "Treat as identical records with <snps|indels|both|all|some|none>, see man page for details.", category: "advanced"}
         columns: {description: "Comma-separated list of columns or tags to carry over from the annotation file (see man page for details).", category: "advanced"}
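
The Boolean introduced above replaces the old outputType input throughout this file: whether the output is bgzip-compressed (and tabix-indexed) is now derived from the outputPath extension. A minimal sketch of how that expression evaluates, using hypothetical paths in a stand-alone workflow that is not part of this commit:

version 1.0

workflow CompressedCheck {
    input {
        String gzPath = "results/sample.vcf.gz"  # hypothetical path
        String plainPath = "results/sample.vcf"  # hypothetical path
    }

    # basename(path, ".gz") strips the suffix only when it is present, so the
    # two basenames differ exactly when the path ends in ".gz".
    Boolean gzCompressed = basename(gzPath) != basename(gzPath, ".gz")          # true: the task emits "-O z" and indexes
    Boolean plainCompressed = basename(plainPath) != basename(plainPath, ".gz") # false: "-O v", no index

    output {
        Boolean compressedForGz = gzCompressed
        Boolean compressedForPlain = plainCompressed
    }
}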
@@ -129,23 +127,24 @@ task Sort {
     input {
         File inputFile
         String outputPath = "output.vcf.gz"
+        String tmpDir = "./sorting-tmp"
         String memory = "256M"
         Int timeMinutes = 1 + ceil(size(inputFile, "G"))
         String dockerImage = "quay.io/biocontainers/bcftools:1.10.2--h4f4756c_2"
-        String outputType = "z"
     }
-    Boolean indexing = if outputType == "z" then true else false
+    Boolean compressed = basename(outputPath) != basename(outputPath, ".gz")
     command {
         set -e
-        mkdir -p "$(dirname ~{outputPath})"
+        mkdir -p "$(dirname ~{outputPath})" ~{tmpDir}
         bcftools sort \
         -o ~{outputPath} \
-        -O ~{outputType} \
+        -O ~{true="z" false="v" compressed} \
+        -T ~{tmpDir} \
         ~{inputFile}
-        ~{if indexing then 'bcftools index --tbi ~{outputPath}' else ''}
+        ~{if compressed then 'bcftools index --tbi ~{outputPath}' else ''}
     }
     output {
@@ -162,7 +161,7 @@ task Sort {
     parameter_meta {
         inputFile: {description: "A vcf or bcf file.", category: "required"}
         outputPath: {description: "The location the output VCF file should be written.", category: "common"}
-        outputType: {description: "Output type: v=vcf, z=vcf.gz, b=bcf, u=uncompressed bcf", category: "advanced"}
+        tmpDir: {description: "The location of the temporary files during the bcftools sorting.", category: "advanced"}
         memory: {description: "The amount of memory this job will use.", category: "advanced"}
         timeMinutes: {description: "The maximum amount of time the job will run in minutes.", category: "advanced"}
         dockerImage: {description: "The docker image used for this task. Changing this may result in errors which the developers may choose not to address.", category: "advanced"}
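
A usage sketch for the updated Sort task (not part of this commit; the import path, file names, and the outputVcf output name are assumptions here):

version 1.0

import "bcftools.wdl" as bcftools  # import path assumed; adjust to your checkout of this repository

workflow SortExample {
    input {
        File unsortedVcf  # hypothetical input
    }

    call bcftools.Sort as sortVcf {
        input:
            inputFile = unsortedVcf,
            outputPath = "sorted/sample.vcf.gz",  # the ".gz" suffix yields bgzipped output plus a .tbi index
            tmpDir = "./bcftools-sort-tmp"        # new input: where bcftools sort writes its temporary files
    }

    output {
        File sortedVcf = sortVcf.outputVcf  # output name assumed; the Sort output block is not shown in this diff
    }
}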
@@ -280,26 +279,22 @@ task View {
     input {
         File inputFile
         String outputPath = "output.vcf"
-        Int compressionLevel = 0
         String memory = "256M"
         Int timeMinutes = 1 + ceil(size(inputFile, "G"))
         String dockerImage = "quay.io/biocontainers/bcftools:1.10.2--h4f4756c_2"
     }
-    String outputType = if compressionLevel > 0 then "z" else "v"
-    Boolean indexing = if compressionLevel > 0 then true else false
-    String outputFilePath = if compressionLevel > 0 then outputPath + ".gz" else outputPath
+    Boolean compressed = basename(outputPath) != basename(outputPath, ".gz")
     command {
         set -e
         mkdir -p "$(dirname ~{outputPath})"
         bcftools view \
         -o ~{outputPath} \
-        -l ~{compressionLevel} \
-        -O ~{outputType} \
+        -O ~{true="z" false="v" compressed} \
         ~{inputFile}
-        ~{if indexing then 'bcftools index --tbi ~{outputPath}' else ''}
+        ~{if compressed then 'bcftools index --tbi ~{outputPath}' else ''}
     }
     output {
         File outputVcf = outputPath
@@ -314,7 +309,6 @@ task View {
     parameter_meta {
         inputFile: {description: "A vcf or bcf file.", category: "required"}
-        compressionLevel: {description: "Compression level from 0 (uncompressed) to 9 (best).", category: "advanced"}
         outputPath: {description: "The location the output VCF file should be written.", category: "common"}
         memory: {description: "The amount of memory this job will use.", category: "advanced"}
         timeMinutes: {description: "The maximum amount of time the job will run in minutes.", category: "advanced"}
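
With compressionLevel removed, View follows the same extension-based convention as Annotate and Sort above. A hypothetical call, not part of this commit (import path and file names assumed):

version 1.0

import "bcftools.wdl" as bcftools  # import path assumed

workflow ViewExample {
    input {
        File inputBcf  # hypothetical BCF to convert
    }

    # With the default outputPath ("output.vcf") the result stays uncompressed and unindexed;
    # ending the path in ".gz" would instead produce bgzipped output plus a .tbi index.
    call bcftools.View as viewVcf {
        input:
            inputFile = inputBcf,
            outputPath = "converted/sample.vcf"
    }

    output {
        File convertedVcf = viewVcf.outputVcf
    }
}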
...
@@ -92,6 +92,7 @@ task NanoPlot {
         inputFileType: {description: "The format of the read file.", category: "required"}
         outputDir: {description: "Output directory path.", category: "required"}
         outputPrefix: {description: "Output file prefix.", category: "required"}
+        outputPath: {description: "Combination of the outputDir & outputPrefix strings.", category: "advanced"}
         outputTsvStats: {description: "Output the stats file as a properly formatted TSV.", category: "common"}
         dropOutliers: {description: "Drop outlier reads with extreme long length.", category: "advanced"}
         logLengths: {description: "Additionally show logarithmic scaling of lengths in plots.", category: "advanced"}
...