biowdl / tasks / Commits

Commit 64427306, authored 2 years ago by Cats
fix some issues in fastp, add picard CollectInsertSizeMetrics
Parent: bf7aba3c
Showing 2 changed files, with 68 additions and 12 deletions:

    fastp.wdl   +16 −12
    picard.wdl  +52 −0
fastp.wdl  +16 −12

-verison 1.0
+version 1.0
 # MIT License
 #
@@ -24,8 +24,8 @@ verison 1.0

task Fastp {
    input {
-        File r1
-        File r2
+        File read1
+        File read2
        String outputPathR1
        String outputPathR2
        String htmlPath
@@ -35,24 +35,26 @@ task Fastp {
        Boolean correction = false
        Int lengthRequired = 15
        Int? split
+        Boolean performAdapterTrimming = true

        Int threads = 4
        String memory = "5GiB"
-        Int timeMinutes = 1 + ceil(size([read1, read2], "G") * 7.0 / cores)
+        Int timeMinutes = 1 + ceil(size([read1, read2], "G") * 7.0 / threads)
        String dockerImage = "quay.io/biocontainers/fastp:0.23.2--h5f740d0_3"
    }

    String outputDirR1 = sub(outputPathR1, basename(outputPathR1), "")
+    String outputDirR2 = sub(outputPathR2, basename(outputPathR2), "")

-    command {
+    command <<<
        set -e
        mkdir -p $(dirname ~{outputPathR1} ~{outputPathR2} ~{htmlPath} ~{jsonPath})
        # predict output paths
        seq 1 ~{if defined(split) then split else "2"} | awk '{print "~{outputDirR1}/"$0".~{basename(outputPathR1)}"}' > r1_paths
        seq 1 ~{if defined(split) then split else "2"} | awk '{print "~{outputDirR2}/"$0".~{basename(outputPathR2)}"}' > r2_paths
        fastp \
-            -i ~{r1} \
-            ~{"-I " + r2} \
+            -i ~{read1} \
+            ~{"-I " + read2} \
            -o ~{outputPathR1} \
            ~{"-O " + outputPathR2} \
            -h ~{htmlPath} \
@@ -62,8 +64,9 @@ task Fastp {
            --length_required ~{lengthRequired} \
            --threads ~{threads} \
            ~{"--split " + split} \
-            ~{if defined(split) then "-d 0" else ""}
-    }
+            ~{if defined(split) then "-d 0" else ""} \
+            ~{if performAdapterTrimming then "" else "--disable_adapter_trimming"}
+    >>>

    Array[String] r1Paths = read_lines("r1_paths")
    Array[String] r2Paths = read_lines("r2_paths")
@@ -76,15 +79,15 @@ task Fastp {
    }

    runtime {
-        cpu: cores
+        cpu: threads
        memory: memory
        time_minutes: timeMinutes
        docker: dockerImage
    }

    parameter_meta {
-        r1: {description: "The R1 fastq file.", category: "required"}
-        r2: {description: "The R2 fastq file.", category: "required"}
+        read1: {description: "The R1 fastq file.", category: "required"}
+        read2: {description: "The R2 fastq file.", category: "required"}
        outputPathR1: {description: "The output path for the R1 file.", category: "required"}
        outputPathR2: {description: "The output path for the R2 file.", category: "required"}
        htmlPath: {description: "The path to write the html report to.", category: "required"}
@@ -93,6 +96,7 @@ task Fastp {
        correction: {description: "Whether or not to apply overlap based correction.", category: "advanced"}
        lengthRequired: {description: "The minimum read length.", category: "advanced"}
        split: {description: "The number of chunks to split the files into.", category: "common"}
+        performAdapterTrimming: {description: "Whether adapter trimming should be performed or not.", category: "advanced"}
        threads: {description: "The number of threads to use.", category: "advanced"}
        memory: {description: "The amount of memory this job will use.", category: "advanced"}
        timeMinutes: {description: "The maximum amount of time the job will run in minutes.", category: "advanced"}
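For reference, a minimal sketch of how a downstream workflow might call the updated Fastp task after this change. The import path, workflow name, and output paths are hypothetical, and a jsonPath input is assumed to exist alongside htmlPath, based on the mkdir line in the task command.

version 1.0

# Hypothetical import path; point this at tasks/fastp.wdl in your checkout.
import "fastp.wdl" as fastp

workflow FastpExample {
    input {
        File fastq1
        File fastq2
    }

    call fastp.Fastp {
        input:
            read1 = fastq1,  # renamed from r1 in this commit
            read2 = fastq2,  # renamed from r2 in this commit
            outputPathR1 = "trimmed/sample_R1.fastq.gz",
            outputPathR2 = "trimmed/sample_R2.fastq.gz",
            htmlPath = "reports/sample_fastp.html",
            jsonPath = "reports/sample_fastp.json",  # assumed input, see the mkdir line in the task command
            performAdapterTrimming = false           # new input; false passes --disable_adapter_trimming
    }
}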
picard.wdl  +52 −0

@@ -136,6 +136,58 @@ task CollectHsMetrics {
    }
}

+task CollectInsertSizeMetrics {
+    input {
+        File inputBam
+        File inputBamIndex
+
+        Float? minimumPercentage
+        String basename = "./insertSize_metrics"
+
+        String memory = "5GiB"
+        String javaXmx = "4G"
+        Int timeMinutes = 1 + ceil(size(inputBam, "GiB") * 6)
+        String dockerImage = "quay.io/biocontainers/picard:2.23.2--0"
+    }
+
+    command {
+        set -e
+        mkdir -p "$(dirname ~{basename})"
+        picard -Xmx~{javaXmx} -XX:ParallelGCThreads=1 \
+        CollectInsertSizeMetrics \
+        I=~{inputBam} \
+        O=~{basename}.txt \
+        H=~{basename}.pdf \
+        ~{"M=" + minimumPercentage}
+    }
+
+    output {
+        File metricsTxt = "~{basename}.txt"
+        File metricsPdf = "~{basename}.pdf"
+    }
+
+    runtime {
+        docker: dockerImage
+        time_minutes: timeMinutes
+        memory: memory
+    }
+
+    parameter_meta {
+        # inputs
+        inputBam: {description: "The input BAM file for which metrics will be collected.", category: "required"}
+        inputBamIndex: {description: "The index of the input BAM file.", category: "required"}
+        minimumPercentage: {description: "Equivalent to picard CollectInsertSizeMetrics' `M` option.", category: "advanced"}
+        basename: {description: "The basename for the output files.", category: "common"}
+        memory: {description: "The amount of memory this job will use.", category: "advanced"}
+        javaXmx: {description: "The maximum memory available to the program. Should be lower than `memory` to accommodate JVM overhead.",
+                  category: "advanced"}
+        timeMinutes: {description: "The maximum amount of time the job will run in minutes.", category: "advanced"}
+        dockerImage: {description: "The docker image used for this task. Changing this may result in errors which the developers may choose not to address.",
+                      category: "advanced"}
+    }
+}
+
task CollectMultipleMetrics {
    input {
        File inputBam
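Likewise, a minimal sketch of calling the new CollectInsertSizeMetrics task from a workflow. The import path, workflow name, and basename value are hypothetical; the input and output names match the task definition added above.

version 1.0

# Hypothetical import path; point this at tasks/picard.wdl in your checkout.
import "picard.wdl" as picard

workflow InsertSizeExample {
    input {
        File bam
        File bamIndex
    }

    call picard.CollectInsertSizeMetrics {
        input:
            inputBam = bam,
            inputBamIndex = bamIndex,
            basename = "metrics/sample_insertSize"  # overrides the "./insertSize_metrics" default
    }

    output {
        # Text metrics table and the insert-size histogram PDF produced by Picard.
        File insertSizeMetrics = CollectInsertSizeMetrics.metricsTxt
        File insertSizeHistogram = CollectInsertSizeMetrics.metricsPdf
    }
}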