Commit 00e96408 authored by Peter van 't Hof

Added mainFunction flag to most main jobs

parent 287c3a7e
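
The diff below touches one base trait and many pipelines: BiopetCommandLineFunction gains a mutable mainFunction flag (replacing the read-only def mainFunction = false), the individual pipelines flip it to true on their primary jobs, and WriteDependencies exports it as the "main_job" field. A minimal, self-contained sketch of that pattern, using hypothetical stand-in names rather than the real Biopet traits:

// Simplified sketch of the flag pattern in this commit; CommandLineJob and the
// two job classes are stand-ins, not the actual Biopet types.
object MainFunctionFlagSketch {
  trait CommandLineJob {
    // before this commit: `def mainFunction = false` (fixed); now a per-job var
    var mainFunction: Boolean = false
  }

  class Macs2Job extends CommandLineJob         // stand-in for a peak-calling job
  class SamtoolsViewJob extends CommandLineJob  // stand-in for a filtering job

  def main(args: Array[String]): Unit = {
    val macs2 = new Macs2Job
    macs2.mainFunction = true                   // pipelines mark their primary jobs like this
    val samtoolsView = new SamtoolsViewJob      // left at the default: not a main job
    println(macs2.mainFunction)                 // true
    println(samtoolsView.mainFunction)          // false
  }
}
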
......@@ -36,7 +36,7 @@ trait BiopetCommandLineFunction extends CommandLineResources { biopetFunction =>
var executable: String = _
def mainFunction = false
var mainFunction = false
/** This is the default shell for drmaa jobs */
def defaultRemoteCommand = "bash"
......
......@@ -85,7 +85,10 @@ trait BiopetQScript extends Configurable with GatkLogging { qscript: QScript =>
this match {
case q: MultiSampleQScript if q.onlySamples.nonEmpty && !q.samples.forall(x => q.onlySamples.contains(x._1)) =>
logger.info("Write report is skipped because sample flag is used")
case _ => reportClass.foreach(add(_))
case _ => reportClass.foreach { report =>
report.mainFunction = true
add(report)
}
}
logger.info("Running pre commands")
......
......@@ -14,11 +14,13 @@
*/
package nl.lumc.sasc.biopet.core
import java.io.{ File, PrintWriter }
import java.io.{File, PrintWriter}
import nl.lumc.sasc.biopet.core.summary.WriteSummary
import nl.lumc.sasc.biopet.utils.config.Configurable
import nl.lumc.sasc.biopet.utils.{ Logging, ConfigUtils }
import org.broadinstitute.gatk.queue.function.{ CommandLineFunction, QFunction }
import nl.lumc.sasc.biopet.utils.{ConfigUtils, Logging}
import org.broadinstitute.gatk.queue.function.{CommandLineFunction, QFunction}
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
......@@ -112,6 +114,7 @@ object WriteDependencies extends Logging with Configurable {
case _ => None
}), "main_job" -> (f match {
case cmd: BiopetCommandLineFunction => cmd.mainFunction
case _: WriteSummary => true
case _ => false
}), "intermediate" -> f.isIntermediate,
"depends_on_intermediate" -> f.inputs.exists(files(_).isIntermediate),
......
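
For illustration, a hedged sketch of the "main_job" dispatch shown in the hunk above; the types here are simplified stand-ins (the real code matches on Queue QFunction objects, BiopetCommandLineFunction and WriteSummary):

// Sketch only: how a per-job "main_job" value could be derived from the flag.
object DependencySketch {
  trait CommandLineJob { var mainFunction: Boolean = false }
  class SummaryJob                          // stand-in for WriteSummary: always main
  class ShellJob extends CommandLineJob     // stand-in for a BiopetCommandLineFunction

  def mainJobFlag(job: Any): Boolean = job match {
    case cmd: CommandLineJob => cmd.mainFunction // honour the per-job flag
    case _: SummaryJob       => true             // summary jobs always count as main
    case _                   => false            // everything else stays non-main
  }

  def main(args: Array[String]): Unit = {
    val shell = new ShellJob
    shell.mainFunction = true
    println(Map("main_job" -> mainJobFlag(shell)))           // Map(main_job -> true)
    println(Map("main_job" -> mainJobFlag(new SummaryJob)))  // Map(main_job -> true)
    println(Map("main_job" -> mainJobFlag("not a job")))     // Map(main_job -> false)
  }
}

The separate WriteSummary case in the real code makes summary jobs report as main regardless of the flag, since they are not shell jobs that carry it.
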
......@@ -75,6 +75,7 @@ class Carp(val root: Configurable) extends QScript with MultisampleMappingTrait
samtoolsView.output = preProcessBam.get
samtoolsView.b = true
samtoolsView.h = true
samtoolsView.mainFunction = true
add(samtoolsView)
val bamMetricsFilter = BamMetrics(qscript, preProcessBam.get, new File(sampleDir, "metrics-filter"), sampleId = Some(sampleId))
......@@ -94,6 +95,7 @@ class Carp(val root: Configurable) extends QScript with MultisampleMappingTrait
macs2.name = Some(sampleId)
macs2.outputdir = sampleDir + File.separator + "macs2" + File.separator + sampleId + File.separator
macs2.fileformat = if (paired) Some("BAMPE") else Some("BAM")
macs2.mainFunction = true
add(macs2)
}
}
......@@ -131,6 +133,7 @@ class Carp(val root: Configurable) extends QScript with MultisampleMappingTrait
macs2.name = Some(sampleId + "_VS_" + controlId)
macs2.fileformat = if (paired) Some("BAMPE") else Some("BAM")
macs2.outputdir = sample.sampleDir + File.separator + "macs2" + File.separator + macs2.name.get + File.separator
macs2.mainFunction = true
add(macs2)
}
}
......
......@@ -113,6 +113,7 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
if (paired) outputFiles += ("fastq_input_R2" -> inputR2.get)
fastqcR1 = Fastqc(this, inputR1, new File(outputDir, R1Name + ".fastqc/"))
fastqcR1.mainFunction = true
add(fastqcR1)
addSummarizable(fastqcR1, "fastqc_R1")
outputFiles += ("fastqc_R1" -> fastqcR1.output)
......@@ -132,6 +133,7 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
if (paired) {
fastqcR2 = Fastqc(this, inputR2.get, new File(outputDir, R2Name + ".fastqc/"))
fastqcR2.mainFunction = true
add(fastqcR2)
addSummarizable(fastqcR2, "fastqc_R2")
outputFiles += ("fastqc_R2" -> fastqcR2.output)
......@@ -228,6 +230,7 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
pipe.deps ::= fastqcR1.output
pipe.deps ::= fastqcR2.output
pipe.isIntermediate = !keepQcFastqFiles
pipe.mainFunction = true
add(pipe)
addSummarizable(fqSync, "fastq_sync")
......
......@@ -60,6 +60,7 @@ class CombineReads(val root: Configurable) extends QScript with SummaryQScript w
flash.fastqR1 = fastqR1
flash.fastqR2 = fastqR2
flash.isIntermediate = (forwardPrimers ::: reversePrimers).nonEmpty
flash.mainFunction = true
add(flash)
if ((forwardPrimers ::: reversePrimers).nonEmpty) {
......@@ -68,6 +69,7 @@ class CombineReads(val root: Configurable) extends QScript with SummaryQScript w
cutadapt.fastqOutput = this.combinedFastq
cutadapt.statsOutput = swapExt(outputDir, cutadapt.fastqOutput, ".fastq.gz", ".stats")
(forwardPrimers ::: reversePrimers).foreach(cutadapt.anywhere += _)
cutadapt.mainFunction = true
add(cutadapt)
addSummarizable(cutadapt, "cutadapt")
}
......
......@@ -56,6 +56,7 @@ class ExtractUnmappedReads(val root: Configurable) extends QScript with BiopetQS
if (paired) samtoolsViewSelectUnmapped.f = List("12")
else samtoolsViewSelectUnmapped.f = List("4")
samtoolsViewSelectUnmapped.isIntermediate = true
samtoolsViewSelectUnmapped.mainFunction = true
add(samtoolsViewSelectUnmapped)
// start bam to fastq (only on unaligned reads) also extract the matesam
......@@ -67,6 +68,7 @@ class ExtractUnmappedReads(val root: Configurable) extends QScript with BiopetQS
samToFastq.fastqUnpaired = fastqUnmappedSingletons
}
samToFastq.isIntermediate = !config("keep_unmapped_fastq", default = false).asBoolean
samToFastq.mainFunction = true
add(samToFastq)
}
}
......@@ -37,6 +37,7 @@ class GearsCentrifuge(val root: Configurable) extends QScript with SummaryQScrip
centrifuge.metFile = Some(centrifugeMetOutput)
val centrifugeCmd = centrifuge | new Gzip(this) > centrifugeOutput
centrifugeCmd.threadsCorrection = -1
centrifugeCmd.mainFunction = true
add(centrifugeCmd)
makeKreport("centrifuge", unique = false)
......@@ -51,12 +52,15 @@ class GearsCentrifuge(val root: Configurable) extends QScript with SummaryQScrip
centrifugeKreport.centrifugeOutputFiles :+= fifo
centrifugeKreport.output = new File(outputDir, s"$outputName.$name.kreport")
centrifugeKreport.onlyUnique = unique
add(new BiopetFifoPipe(this, List(centrifugeKreport, Zcat(this, centrifugeOutput, fifo))))
val pipe = new BiopetFifoPipe(this, List(centrifugeKreport, Zcat(this, centrifugeOutput, fifo)))
pipe.mainFunction = true
add(pipe)
val krakenReportJSON = new KrakenReportToJson(this)
krakenReportJSON.inputReport = centrifugeKreport.output
krakenReportJSON.output = new File(outputDir, s"$outputName.$name.krkn.json")
krakenReportJSON.skipNames = config("skipNames", default = false)
krakenReportJSON.mainFunction = true
add(krakenReportJSON)
addSummarizable(krakenReportJSON, s"${name}_report")
}
......
......@@ -52,6 +52,7 @@ class GearsKraken(val root: Configurable) extends QScript with SummaryQScript wi
seqtk.output = new File(outputDir, input.getName + ".fasta")
seqtk.A = true
seqtk.isIntermediate = true
seqtk.mainFunction = true
add(seqtk)
seqtk.output
}
......@@ -71,6 +72,7 @@ class GearsKraken(val root: Configurable) extends QScript with SummaryQScript wi
krakenAnalysis.classifiedOut = Some(new File(outputDir, s"$outputName.krkn.classified.fastq"))
krakenAnalysis.unclassifiedOut = Some(new File(outputDir, s"$outputName.krkn.unclassified.fastq"))
krakenAnalysis.mainFunction = true
add(krakenAnalysis)
outputFiles += ("kraken_output_raw" -> krakenAnalysis.output)
......@@ -82,6 +84,7 @@ class GearsKraken(val root: Configurable) extends QScript with SummaryQScript wi
krakenReport.input = krakenAnalysis.output
krakenReport.showZeros = true
krakenReport.output = new File(outputDir, s"$outputName.krkn.full")
krakenReport.mainFunction = true
add(krakenReport)
outputFiles += ("kraken_report_input" -> krakenReport.input)
......@@ -91,6 +94,7 @@ class GearsKraken(val root: Configurable) extends QScript with SummaryQScript wi
krakenReportJSON.inputReport = krakenReport.output
krakenReportJSON.output = new File(outputDir, s"$outputName.krkn.json")
krakenReportJSON.skipNames = config("skipNames", default = false)
krakenReportJSON.mainFunction = true
add(krakenReportJSON)
addSummarizable(krakenReportJSON, "krakenreport")
......
......@@ -51,11 +51,13 @@ class GearsQiimeOpen(val root: Configurable) extends QScript with SummaryQScript
splitLib.input :+= fastqInput
splitLib.outputDir = new File(outputDir, "split_libraries_fastq")
sampleId.foreach(splitLib.sampleIds :+= _.replaceAll("_", "-"))
splitLib.mainFunction = true
add(splitLib)
val openReference = new PickOpenReferenceOtus(this)
openReference.inputFasta = addDownsample(splitLib.outputSeqs, new File(splitLib.outputDir, s"${sampleId.get}.downsample.fna"))
openReference.outputDir = new File(outputDir, "pick_open_reference_otus")
openReference.mainFunction = true
add(openReference)
_otuMap = openReference.otuMap
_otuTable = openReference.otuTable
......
......@@ -60,6 +60,7 @@ class GearsQiimeRtax(val root: Configurable) extends QScript with BiopetQScript
slfR1.input :+= fastqR1
slfR1.outputDir = new File(outputDir, "split_libraries_fastq_R1")
sampleId.foreach(slfR1.sampleIds :+= _)
slfR1.mainFunction = true
add(slfR1)
lazy val slfR2 = fastqR2.map { file =>
......@@ -74,6 +75,7 @@ class GearsQiimeRtax(val root: Configurable) extends QScript with BiopetQScript
val pickOtus = new PickOtus(this)
pickOtus.inputFasta = slfR1.outputSeqs
pickOtus.outputDir = new File(outputDir, "pick_otus")
pickOtus.mainFunction = true
add(pickOtus)
val pickRepSet = new PickRepSet(this)
......@@ -83,6 +85,7 @@ class GearsQiimeRtax(val root: Configurable) extends QScript with BiopetQScript
pickRepSet.outputFasta = Some(new File(repSetOutputDir, slfR1.outputSeqs.getName))
pickRepSet.logFile = Some(new File(repSetOutputDir, slfR1.outputSeqs.getName
.stripSuffix(".fasta").stripSuffix(".fa").stripSuffix(".fna") + ".log"))
pickRepSet.mainFunction = true
add(pickRepSet)
val assignTaxonomy = new AssignTaxonomy(this)
......@@ -91,6 +94,7 @@ class GearsQiimeRtax(val root: Configurable) extends QScript with BiopetQScript
assignTaxonomy.inputFasta = pickRepSet.outputFasta.get
assignTaxonomy.read1SeqsFp = Some(slfR1.outputSeqs)
assignTaxonomy.read2SeqsFp = slfR2.map(_.outputSeqs)
assignTaxonomy.mainFunction = true
add(assignTaxonomy)
}
}
......@@ -38,6 +38,7 @@ class GearsSeqCount(val root: Configurable) extends QScript with BiopetQScript w
val seqCount = new SageCountFastq(this)
seqCount.input = fastqInput
seqCount.output = countFile
seqCount.mainFunction = true
add(seqCount)
}
}
......@@ -211,6 +211,7 @@ class Gentrap(val root: Configurable) extends QScript
ribosomalRefFlat().foreach(job.intervalFile = _)
job.outputBam = createFile("cleaned.bam")
job.discardedBam = Some(createFile("rrna.bam"))
job.mainFunction = true
add(job)
Some(job.outputBam)
} else bamFile
......
......@@ -35,6 +35,7 @@ class BaseCounts(val root: Configurable) extends QScript with Measurement with A
baseCounter.outputDir = new File(outputDir, id)
baseCounter.prefix = id
baseCounter.refFlat = annotationRefFlat()
baseCounter.mainFunction = true
add(baseCounter)
id -> baseCounter
}
......
......@@ -33,6 +33,7 @@ trait CufflinksMeasurement extends QScript with Measurement {
val jobs = bamFiles.map {
case (id, file) =>
val cufflinks = makeCufflinksJob(id, file)
cufflinks.mainFunction = true
add(cufflinks)
id -> cufflinks
}
......
......@@ -41,6 +41,7 @@ class FragmentsPerGene(val root: Configurable) extends QScript with Measurement
sortSam.output = swapExt(outputDir, file, ".bam", ".idsorted.bam")
sortSam.sortOrder = "queryname"
sortSam.isIntermediate = true
sortSam.mainFunction = true
add(sortSam)
sortSam.output
} else file
......@@ -51,6 +52,7 @@ class FragmentsPerGene(val root: Configurable) extends QScript with Measurement
job.output = new File(outputDir, s"$id.$name.counts")
job.format = Option("bam")
job.order = if (sortOnId) Some("name") else Some("pos")
job.mainFunction = true
add(job)
id -> job
}
......
......@@ -200,6 +200,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
if (chunking) {
val fastSplitterR1 = new FastqSplitter(this)
fastSplitterR1.mainFunction = true
fastSplitterR1.input = inputR1
for ((chunkDir, fastqfile) <- chunks) fastSplitterR1.output :+= fastqfile._1
fastSplitterR1.isIntermediate = true
......@@ -207,6 +208,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
if (paired) {
val fastSplitterR2 = new FastqSplitter(this)
fastSplitterR2.mainFunction = true
fastSplitterR2.input = inputR2.get
for ((chunkDir, fastqfile) <- chunks) fastSplitterR2.output :+= fastqfile._2.get
fastSplitterR2.isIntermediate = true
......@@ -255,10 +257,12 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
if (!skipMarkduplicates) {
bamFile = new File(outputDir, outputName + ".dedup.bam")
val md = MarkDuplicates(this, bamFiles, bamFile)
md.mainFunction = true
add(md)
addSummarizable(md, "mark_duplicates")
} else if (skipMarkduplicates && chunking) {
val mergeSamFile = MergeSamFiles(this, bamFiles, new File(outputDir, outputName + ".merge.bam"))
mergeSamFile.mainFunction = true
add(mergeSamFile)
bamFile = mergeSamFile.output
}
......@@ -297,6 +301,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
bwaAlnR1.fastq = R1
bwaAlnR1.output = swapExt(output.getParent, output, ".bam", ".R1.sai")
bwaAlnR1.isIntermediate = true
bwaAlnR1.mainFunction = true
add(bwaAlnR1)
val samFile: File = if (paired) {
......@@ -304,6 +309,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
bwaAlnR2.fastq = R2.get
bwaAlnR2.output = swapExt(output.getParent, output, ".bam", ".R2.sai")
bwaAlnR2.isIntermediate = true
bwaAlnR2.mainFunction = true
add(bwaAlnR2)
val bwaSampe = new BwaSampe(this)
......@@ -314,6 +320,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
bwaSampe.r = getReadGroupBwa
bwaSampe.output = swapExt(output.getParent, output, ".bam", ".sam")
bwaSampe.isIntermediate = true
bwaSampe.mainFunction = true
add(bwaSampe)
bwaSampe.output
......@@ -324,6 +331,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
bwaSamse.r = getReadGroupBwa
bwaSamse.output = swapExt(output.getParent, output, ".bam", ".sam")
bwaSamse.isIntermediate = true
bwaSamse.mainFunction = true
add(bwaSamse)
bwaSamse.output
......@@ -331,6 +339,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val sortSam = SortSam(this, samFile, output)
if (chunking || !skipMarkduplicates) sortSam.isIntermediate = true
sortSam.mainFunction = true
add(sortSam)
sortSam.output
}
......@@ -346,6 +355,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val pipe = bwaCommand | sortSam
pipe.isIntermediate = chunking || !skipMarkduplicates
pipe.threadsCorrection = -1
pipe.mainFunction = true
add(pipe)
output
}
......@@ -362,6 +372,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val ar = addAddOrReplaceReadGroups(reorderSam.output, output)
val pipe = new BiopetFifoPipe(this, gsnapCommand :: ar._1 :: reorderSam :: Nil)
pipe.threadsCorrection = -2
pipe.mainFunction = true
add(pipe)
ar._2
}
......@@ -387,6 +398,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val pipe = hisat2 | sortSam
pipe.isIntermediate = chunking || !skipMarkduplicates
pipe.threadsCorrection = 1
pipe.mainFunction = true
add(pipe)
output
......@@ -402,6 +414,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
tophat.noConvertBam = false
// and always keep input ordering
tophat.keepFastaOrder = true
tophat.mainFunction = true
add(tophat)
// fix unmapped file coordinates
......@@ -416,6 +429,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val sorter = SortSam(this, fixer.outputSam, new File(tophat.outputDir, "unmapped_fixup.sorted.bam"))
sorter.sortOrder = "coordinate"
sorter.isIntermediate = true
sorter.mainFunction = true
add(sorter)
// merge with mapped file
......@@ -456,9 +470,11 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
stampyCmd.sanger = true
stampyCmd.output = this.swapExt(output.getParentFile, output, ".bam", ".sam")
stampyCmd.isIntermediate = true
stampyCmd.mainFunction = true
add(stampyCmd)
val sortSam = SortSam(this, stampyCmd.output, output)
if (chunking || !skipMarkduplicates) sortSam.isIntermediate = true
sortSam.mainFunction = true
add(sortSam)
sortSam.output
}
......@@ -477,6 +493,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val ar = addAddOrReplaceReadGroups(bowtie.output, output)
val pipe = new BiopetFifoPipe(this, (Some(bowtie) :: Some(ar._1) :: Nil).flatten)
pipe.threadsCorrection = -1
pipe.mainFunction = true
add(pipe)
ar._2
}
......@@ -496,6 +513,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val pipe = bowtie2 | sortSam
pipe.isIntermediate = chunking || !skipMarkduplicates
pipe.threadsCorrection = -1
pipe.mainFunction = true
add(pipe)
output
}
......@@ -516,6 +534,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
pipe.threadsCorrection = -3
zcatR1._1.foreach(x => pipe.threadsCorrection -= 1)
zcatR2.foreach(_._1.foreach(x => pipe.threadsCorrection -= 1))
pipe.mainFunction = true
add(pipe)
reorderSam.output
}
......@@ -528,6 +547,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
zcatR2.foreach(_._1.foreach(add(_)))
val starCommand = Star._2pass(this, zcatR1._2, zcatR2.map(_._2), outputDir, isIntermediate = true)
starCommand._2.foreach(_.mainFunction = true)
addAll(starCommand._2)
val ar = addAddOrReplaceReadGroups(starCommand._1, output)
add(ar._1)
......
......@@ -217,10 +217,12 @@ class Shiva(val root: Configurable) extends QScript with MultisampleMappingTrait
def addIndelRealign(inputBam: File, dir: File, isIntermediate: Boolean): File = {
val realignerTargetCreator = RealignerTargetCreator(this, inputBam, dir)
realignerTargetCreator.isIntermediate = true
realignerTargetCreator.mainFunction = true
add(realignerTargetCreator)
val indelRealigner = IndelRealigner(this, inputBam, realignerTargetCreator.out, dir)
indelRealigner.isIntermediate = isIntermediate
indelRealigner.mainFunction = true
add(indelRealigner)
indelRealigner.out
......@@ -231,6 +233,7 @@ class Shiva(val root: Configurable) extends QScript with MultisampleMappingTrait
val baseRecalibrator = BaseRecalibrator(this, inputBam, swapExt(dir, inputBam, ".bam", ".baserecal"))
if (baseRecalibrator.knownSites.isEmpty) return inputBam
baseRecalibrator.mainFunction = true
add(baseRecalibrator)
if (config("use_analyze_covariates", default = true).asBoolean) {
......@@ -244,6 +247,7 @@ class Shiva(val root: Configurable) extends QScript with MultisampleMappingTrait
val printReads = PrintReads(this, inputBam, swapExt(dir, inputBam, ".bam", ".baserecal.bam"))
printReads.BQSR = Some(baseRecalibrator.out)
printReads.isIntermediate = isIntermediate
printReads.mainFunction = true
add(printReads)
printReads.out
......
......@@ -76,6 +76,7 @@ class ShivaSvCalling(val root: Configurable) extends QScript with SummaryQScript
val mergeSVcalls = new Pysvtools(this)
mergeSVcalls.input = sampleVCFS.flatten
mergeSVcalls.output = new File(outputDir, sample + ".merged.vcf")
mergeSVcalls.mainFunction = true
add(mergeSVcalls)
outputMergedVCFbySample += (sample -> mergeSVcalls.output)
}
......@@ -84,6 +85,7 @@ class ShivaSvCalling(val root: Configurable) extends QScript with SummaryQScript
val mergeSVcallsProject = new Pysvtools(this)
mergeSVcallsProject.input = outputMergedVCFbySample.values.toList
mergeSVcallsProject.output = outputMergedVCF
mergeSVcallsProject.mainFunction = true
add(mergeSVcallsProject)
// merging the VCF calls by project
......@@ -104,14 +106,7 @@ class ShivaSvCalling(val root: Configurable) extends QScript with SummaryQScript
def summarySettings = Map("sv_callers" -> configCallers.toList)
/** Files for the summary */
def summaryFiles: Map[String, File] = {
val callers: Set[String] = configCallers
//callersList.filter(x => callers.contains(x.name)).map(x => x.name -> x.outputFile).toMap + ("final" -> finalFile)
Map(
"final_mergedvcf" -> outputMergedVCF
)
}
def summaryFiles: Map[String, File] = Map("final_mergedvcf" -> outputMergedVCF)
}
object ShivaSvCalling extends PipelineCommand
\ No newline at end of file
......@@ -106,22 +106,27 @@ class ShivaVariantcalling(val root: Configurable) extends QScript
if (normalize && decompose) {
vtNormalize.outputVcf = swapExt(caller.outputDir, caller.outputFile, ".vcf.gz", ".normalized.vcf.gz")
vtNormalize.isIntermediate = true
vtNormalize.mainFunction = true
add(vtNormalize, Tabix(this, vtNormalize.outputVcf))
vtDecompose.inputVcf = vtNormalize.outputVcf
vtDecompose.outputVcf = swapExt(caller.outputDir, vtNormalize.outputVcf, ".vcf.gz", ".decompose.vcf.gz")
vtDecompose.mainFunction = true
add(vtDecompose, Tabix(this, vtDecompose.outputVcf))
cv.variant :+= TaggedFile(vtDecompose.outputVcf, caller.name)
} else if (normalize && !decompose) {
vtNormalize.outputVcf = swapExt(caller.outputDir, caller.outputFile, ".vcf.gz", ".normalized.vcf.gz")
vtNormalize.mainFunction = true
add(vtNormalize, Tabix(this, vtNormalize.outputVcf))
cv.variant :+= TaggedFile(vtNormalize.outputVcf, caller.name)
} else if (!normalize && decompose) {
vtDecompose.inputVcf = caller.outputFile
vtDecompose.outputVcf = swapExt(caller.outputDir, caller.outputFile, ".vcf.gz", ".decompose.vcf.gz")
vtDecompose.mainFunction = true
add(vtDecompose, Tabix(this, vtDecompose.outputVcf))
cv.variant :+= TaggedFile(vtDecompose.outputVcf, caller.name)
} else cv.variant :+= TaggedFile(caller.outputFile, caller.name)
}
cv.mainFunction = true
add(cv)
addStats(finalFile, "final")
......
......@@ -30,13 +30,17 @@ class Breakdancer(val root: Configurable) extends SvCaller {
logger.debug("Starting Breakdancer configuration")
val bdcfg = BreakdancerConfig(this, bamFile, new File(breakdancerSampleDir, sample + ".breakdancer.cfg"))
bdcfg.mainFunction = true
val breakdancer = BreakdancerCaller(this, bdcfg.output, new File(breakdancerSampleDir, sample + ".breakdancer.tsv"))
breakdancer.mainFunction = true
val bdvcf = BreakdancerVCF(this, breakdancer.output, new File(breakdancerSampleDir, sample + ".breakdancer.vcf"),
sample = sample + sampleNameSuffix)
bdvcf.mainFunction = true
val compressedVCF = new SortVcf(this)
compressedVCF.input = bdvcf.output
compressedVCF.output = new File(breakdancerSampleDir, s"${sample}.breakdancer.vcf.gz")
compressedVCF.mainFunction = true
add(bdcfg, breakdancer, bdvcf, compressedVCF)
......
......@@ -27,6 +27,7 @@ class Clever(val root: Configurable) extends SvCaller {
for ((sample, bamFile) <- inputBams) {
val cleverDir = new File(outputDir, sample)
val clever = CleverCaller(this, bamFile, cleverDir)
clever.mainFunction = true
add(clever)
val cleverVCF = new CleverFixVCF(this)
......@@ -34,11 +35,13 @@ class Clever(val root: Configurable) extends SvCaller {
cleverVCF.output = new File(cleverDir, s".${sample}.clever.vcf")
cleverVCF.sampleName = sample + sampleNameSuffix
cleverVCF.isIntermediate = true
cleverVCF.mainFunction = true
add(cleverVCF)
val compressedVCF = new SortVcf(this)
compressedVCF.input = cleverVCF.output
compressedVCF.output = new File(cleverDir, s"${sample}.clever.vcf.gz")
compressedVCF.mainFunction = true
add(compressedVCF)
addVCF(sample, compressedVCF.output)
......
......@@ -43,6 +43,7 @@ class Delly(val root: Configurable) extends SvCaller {
delly.analysistype = "DEL"
delly.isIntermediate = true
delly.outputvcf = new File(dellyDir, sample + ".delly.del.vcf")
delly.mainFunction = true
add(delly)
catVariants.variant :+= delly.outputvcf
......@@ -53,6 +54,7 @@ class Delly(val root: Configurable) extends SvCaller {
delly.analysistype = "DUP"
delly.isIntermediate = true
delly.outputvcf = new File(dellyDir, sample + ".delly.dup.vcf")
delly.mainFunction = true
add(delly)
catVariants.variant :+= delly.outputvcf
......@@ -63,6 +65,7 @@ class Delly(val root: Configurable) extends SvCaller {
delly.analysistype = "INV"
delly.isIntermediate = true
delly.outputvcf = new File(dellyDir, sample + ".delly.inv.vcf")
delly.mainFunction = true
add(delly)
catVariants.variant :+= delly.outputvcf
......@@ -73,6 +76,7 @@ class Delly(val root: Configurable) extends SvCaller {
delly.analysistype = "TRA"
delly.isIntermediate = true
delly.outputvcf = new File(dellyDir, sample + ".delly.tra.vcf")
delly.mainFunction = true
add(delly)
catVariants.variant :+= delly.outputvcf
......@@ -80,11 +84,13 @@ class Delly(val root: Configurable) extends SvCaller {
if (catVariants.variant.isEmpty) Logging.addError("At least 1 SV-type should be selected for Delly")
catVariants.mainFunction = true
add(catVariants)
val compressedVCF = new SortVcf(this)
compressedVCF.input = catVariants.outputFile
compressedVCF.output = catVariants.outputFile + ".gz"
compressedVCF.mainFunction = true
add(compressedVCF)
addVCF(sample, compressedVCF.output)
......
......@@ -41,9 +41,11 @@ class Pindel(val root: Configurable) extends SvCaller {
cfg.input = bamFile
cfg.sampleName = sample + sampleNameSuffix
cfg.output = configFile
cfg.mainFunction = true
add(cfg)
val pindel = PindelCaller(this, cfg.output, pindelDir)
pindel.mainFunction = true
add(pindel)
// Current date
......@@ -56,11 +58,13 @@ class Pindel(val root: Configurable) extends SvCaller {
pindelVcf.pindelOutputRoot = Some(new File(pindelDir, "sample"))
pindelVcf.rDate = todayformat.format(today) // officially, we should enter the date of the genome here
pindelVcf.outputVCF = new File(pindelDir, s"${sample}.pindel.vcf")
pindelVcf.mainFunction = true
add(pindelVcf)
val compressedVCF = new SortVcf(this)
compressedVCF.input = pindelVcf.outputVCF
compressedVCF.output = new File(pindelDir, s"${sample}.pindel.vcf.gz")
compressedVCF.mainFunction = true
add(compressedVCF)
addVCF(sample, compressedVCF.output)
......
......@@ -36,7 +36,9 @@ class Bcftools(val root: Configurable) extends Variantcaller {
bt.v = true
bt.c = true
add(mp | new FixMpileup(this) | bt > outputFile)
val pipe = mp | new FixMpileup(this) | bt > outputFile
pipe.mainFunction = true
add(pipe)
add(Tabix(this, outputFile))
}
}
......@@ -38,7 +38,9 @@ class BcftoolsSingleSample(val root: Configurable) extends Variantcaller {
bt.c = true
bt.output = new File(outputDir, sample + ".vcf.gz")
add(mp | new FixMpileup(this) | bt)
val pipe = mp | new FixMpileup(this) | bt
pipe.mainFunction = true
add(pipe)
add(Tabix(this, bt.output))
bt.output
}
......@@ -48,6 +50,7 @@ class BcftoolsSingleSample(val root: Configurable) extends Variantcaller {
bcfmerge.input = sampleVcfs
bcfmerge.output = outputFile
bcfmerge.O = Some("z")
bcfmerge.mainFunction = true
add(bcfmerge)
} else add(Ln.apply(this, sampleVcfs.head, outputFile))
add(Tabix(this, outputFile))
......
......@@ -28,7 +28,9 @@ class Freebayes(val root: Configurable) extends Variantcaller {
val fb = new nl.lumc.sasc.biopet.extensions.Freebayes(this)
fb.bamfiles = inputBams.values.toList
fb.outputVcf = new File(outputDir, namePrefix + ".freebayes.vcf")