Commit b4e999f0 authored by Peter van 't Hof

Invert the mainFunction option: true is now the default

parent f602182e
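
The diff below flips the default of the mainFunction flag on BiopetCommandLineFunction from false to true, drops the now-redundant mainFunction = true assignments throughout the pipelines, and instead opts auxiliary QC/stat jobs (Md5sum, SeqStat, VcfStats) out with mainFunction = false. A minimal sketch of the resulting pattern, using hypothetical class names rather than the actual Biopet classes:

    // Sketch only: hypothetical stand-ins for BiopetCommandLineFunction and its extensions.
    trait CommandLineFunctionLike {
      // Inverted default: every job counts as a main function unless it opts out.
      var mainFunction = true
    }

    // Auxiliary QC/stat jobs (md5sum, SeqStat, VcfStats, ...) opt out explicitly:
    class Md5sumLike extends CommandLineFunctionLike {
      mainFunction = false
    }

    // Pipeline code can now simply add(job) without setting job.mainFunction = true first.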
......@@ -36,7 +36,7 @@ trait BiopetCommandLineFunction extends CommandLineResources { biopetFunction =>
var executable: String = _
var mainFunction = false
var mainFunction = true
/** This is the default shell for drmaa jobs */
def defaultRemoteCommand = "bash"
......
......@@ -88,7 +88,6 @@ trait BiopetQScript extends Configurable with GatkLogging { qscript: QScript =>
case q: MultiSampleQScript if q.onlySamples.nonEmpty && !q.samples.forall(x => q.onlySamples.contains(x._1)) =>
logger.info("Write report is skipped because sample flag is used")
case _ => reportClass.foreach { report =>
report.mainFunction = true
add(report)
}
}
......
......@@ -22,6 +22,8 @@ import org.broadinstitute.gatk.utils.commandline.{ Input, Output }
/** Extension for md5sum */
class Md5sum(val root: Configurable) extends BiopetCommandLineFunction with Version {
mainFunction = false
@Input(doc = "Input")
var input: File = _
......
......@@ -30,6 +30,8 @@ import org.broadinstitute.gatk.utils.commandline.{ Output, Input }
class SeqStat(val root: Configurable) extends ToolCommandFunction with Summarizable {
def toolObject = nl.lumc.sasc.biopet.tools.SeqStat
mainFunction = false
@Input(doc = "Input FASTQ", shortName = "input", required = true)
var input: File = null
......
......@@ -32,6 +32,8 @@ import scala.io.Source
class VcfStats(val root: Configurable) extends ToolCommandFunction with Summarizable with Reference {
def toolObject = nl.lumc.sasc.biopet.tools.VcfStats
mainFunction = false
@Input(doc = "Input fastq", shortName = "I", required = true)
var input: File = _
......
......@@ -75,7 +75,6 @@ class Carp(val root: Configurable) extends QScript with MultisampleMappingTrait
samtoolsView.output = preProcessBam.get
samtoolsView.b = true
samtoolsView.h = true
samtoolsView.mainFunction = true
add(samtoolsView)
val bamMetricsFilter = BamMetrics(qscript, preProcessBam.get, new File(sampleDir, "metrics-filter"), sampleId = Some(sampleId))
......@@ -95,7 +94,6 @@ class Carp(val root: Configurable) extends QScript with MultisampleMappingTrait
macs2.name = Some(sampleId)
macs2.outputdir = sampleDir + File.separator + "macs2" + File.separator + sampleId + File.separator
macs2.fileformat = if (paired) Some("BAMPE") else Some("BAM")
macs2.mainFunction = true
add(macs2)
}
}
......@@ -133,7 +131,6 @@ class Carp(val root: Configurable) extends QScript with MultisampleMappingTrait
macs2.name = Some(sampleId + "_VS_" + controlId)
macs2.fileformat = if (paired) Some("BAMPE") else Some("BAM")
macs2.outputdir = sample.sampleDir + File.separator + "macs2" + File.separator + macs2.name.get + File.separator
macs2.mainFunction = true
add(macs2)
}
}
......
......@@ -113,7 +113,6 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
if (paired) outputFiles += ("fastq_input_R2" -> inputR2.get)
fastqcR1 = Fastqc(this, inputR1, new File(outputDir, R1Name + ".fastqc/"))
fastqcR1.mainFunction = true
add(fastqcR1)
addSummarizable(fastqcR1, "fastqc_R1")
outputFiles += ("fastqc_R1" -> fastqcR1.output)
......@@ -133,7 +132,6 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
if (paired) {
fastqcR2 = Fastqc(this, inputR2.get, new File(outputDir, R2Name + ".fastqc/"))
fastqcR2.mainFunction = true
add(fastqcR2)
addSummarizable(fastqcR2, "fastqc_R2")
outputFiles += ("fastqc_R2" -> fastqcR2.output)
......@@ -230,7 +228,6 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
pipe.deps ::= fastqcR1.output
pipe.deps ::= fastqcR2.output
pipe.isIntermediate = !keepQcFastqFiles
pipe.mainFunction = true
add(pipe)
addSummarizable(fqSync, "fastq_sync")
......
......@@ -60,7 +60,6 @@ class CombineReads(val root: Configurable) extends QScript with SummaryQScript w
flash.fastqR1 = fastqR1
flash.fastqR2 = fastqR2
flash.isIntermediate = (forwardPrimers ::: reversePrimers).nonEmpty
flash.mainFunction = true
add(flash)
if ((forwardPrimers ::: reversePrimers).nonEmpty) {
......@@ -69,7 +68,6 @@ class CombineReads(val root: Configurable) extends QScript with SummaryQScript w
cutadapt.fastqOutput = this.combinedFastq
cutadapt.statsOutput = swapExt(outputDir, cutadapt.fastqOutput, ".fastq.gz", ".stats")
(forwardPrimers ::: reversePrimers).foreach(cutadapt.anywhere += _)
cutadapt.mainFunction = true
add(cutadapt)
addSummarizable(cutadapt, "cutadapt")
}
......
......@@ -56,7 +56,6 @@ class ExtractUnmappedReads(val root: Configurable) extends QScript with BiopetQS
if (paired) samtoolsViewSelectUnmapped.f = List("12")
else samtoolsViewSelectUnmapped.f = List("4")
samtoolsViewSelectUnmapped.isIntermediate = true
samtoolsViewSelectUnmapped.mainFunction = true
add(samtoolsViewSelectUnmapped)
// start bam to fastq (only on unaligned reads) also extract the matesam
......@@ -68,7 +67,6 @@ class ExtractUnmappedReads(val root: Configurable) extends QScript with BiopetQS
samToFastq.fastqUnpaired = fastqUnmappedSingletons
}
samToFastq.isIntermediate = !config("keep_unmapped_fastq", default = false).asBoolean
samToFastq.mainFunction = true
add(samToFastq)
}
}
......@@ -37,7 +37,6 @@ class GearsCentrifuge(val root: Configurable) extends QScript with SummaryQScrip
centrifuge.metFile = Some(centrifugeMetOutput)
val centrifugeCmd = centrifuge | new Gzip(this) > centrifugeOutput
centrifugeCmd.threadsCorrection = -1
centrifugeCmd.mainFunction = true
add(centrifugeCmd)
makeKreport("centrifuge", unique = false)
......@@ -53,14 +52,12 @@ class GearsCentrifuge(val root: Configurable) extends QScript with SummaryQScrip
centrifugeKreport.output = new File(outputDir, s"$outputName.$name.kreport")
centrifugeKreport.onlyUnique = unique
val pipe = new BiopetFifoPipe(this, List(centrifugeKreport, Zcat(this, centrifugeOutput, fifo)))
pipe.mainFunction = true
add(pipe)
val krakenReportJSON = new KrakenReportToJson(this)
krakenReportJSON.inputReport = centrifugeKreport.output
krakenReportJSON.output = new File(outputDir, s"$outputName.$name.krkn.json")
krakenReportJSON.skipNames = config("skipNames", default = false)
krakenReportJSON.mainFunction = true
add(krakenReportJSON)
addSummarizable(krakenReportJSON, s"${name}_report")
}
......
......@@ -52,7 +52,6 @@ class GearsKraken(val root: Configurable) extends QScript with SummaryQScript wi
seqtk.output = new File(outputDir, input.getName + ".fasta")
seqtk.A = true
seqtk.isIntermediate = true
seqtk.mainFunction = true
add(seqtk)
seqtk.output
}
......@@ -72,7 +71,6 @@ class GearsKraken(val root: Configurable) extends QScript with SummaryQScript wi
krakenAnalysis.classifiedOut = Some(new File(outputDir, s"$outputName.krkn.classified.fastq"))
krakenAnalysis.unclassifiedOut = Some(new File(outputDir, s"$outputName.krkn.unclassified.fastq"))
krakenAnalysis.mainFunction = true
add(krakenAnalysis)
outputFiles += ("kraken_output_raw" -> krakenAnalysis.output)
......@@ -84,7 +82,6 @@ class GearsKraken(val root: Configurable) extends QScript with SummaryQScript wi
krakenReport.input = krakenAnalysis.output
krakenReport.showZeros = true
krakenReport.output = new File(outputDir, s"$outputName.krkn.full")
krakenReport.mainFunction = true
add(krakenReport)
outputFiles += ("kraken_report_input" -> krakenReport.input)
......@@ -94,7 +91,6 @@ class GearsKraken(val root: Configurable) extends QScript with SummaryQScript wi
krakenReportJSON.inputReport = krakenReport.output
krakenReportJSON.output = new File(outputDir, s"$outputName.krkn.json")
krakenReportJSON.skipNames = config("skipNames", default = false)
krakenReportJSON.mainFunction = true
add(krakenReportJSON)
addSummarizable(krakenReportJSON, "krakenreport")
......
......@@ -51,13 +51,11 @@ class GearsQiimeOpen(val root: Configurable) extends QScript with SummaryQScript
splitLib.input :+= fastqInput
splitLib.outputDir = new File(outputDir, "split_libraries_fastq")
sampleId.foreach(splitLib.sampleIds :+= _.replaceAll("_", "-"))
splitLib.mainFunction = true
add(splitLib)
val openReference = new PickOpenReferenceOtus(this)
openReference.inputFasta = addDownsample(splitLib.outputSeqs, new File(splitLib.outputDir, s"${sampleId.get}.downsample.fna"))
openReference.outputDir = new File(outputDir, "pick_open_reference_otus")
openReference.mainFunction = true
add(openReference)
_otuMap = openReference.otuMap
_otuTable = openReference.otuTable
......
......@@ -60,7 +60,6 @@ class GearsQiimeRtax(val root: Configurable) extends QScript with BiopetQScript
slfR1.input :+= fastqR1
slfR1.outputDir = new File(outputDir, "split_libraries_fastq_R1")
sampleId.foreach(slfR1.sampleIds :+= _)
slfR1.mainFunction = true
add(slfR1)
lazy val slfR2 = fastqR2.map { file =>
......@@ -75,7 +74,6 @@ class GearsQiimeRtax(val root: Configurable) extends QScript with BiopetQScript
val pickOtus = new PickOtus(this)
pickOtus.inputFasta = slfR1.outputSeqs
pickOtus.outputDir = new File(outputDir, "pick_otus")
pickOtus.mainFunction = true
add(pickOtus)
val pickRepSet = new PickRepSet(this)
......@@ -85,7 +83,6 @@ class GearsQiimeRtax(val root: Configurable) extends QScript with BiopetQScript
pickRepSet.outputFasta = Some(new File(repSetOutputDir, slfR1.outputSeqs.getName))
pickRepSet.logFile = Some(new File(repSetOutputDir, slfR1.outputSeqs.getName
.stripSuffix(".fasta").stripSuffix(".fa").stripSuffix(".fna") + ".log"))
pickRepSet.mainFunction = true
add(pickRepSet)
val assignTaxonomy = new AssignTaxonomy(this)
......@@ -94,7 +91,6 @@ class GearsQiimeRtax(val root: Configurable) extends QScript with BiopetQScript
assignTaxonomy.inputFasta = pickRepSet.outputFasta.get
assignTaxonomy.read1SeqsFp = Some(slfR1.outputSeqs)
assignTaxonomy.read2SeqsFp = slfR2.map(_.outputSeqs)
assignTaxonomy.mainFunction = true
add(assignTaxonomy)
}
}
......@@ -38,7 +38,6 @@ class GearsSeqCount(val root: Configurable) extends QScript with BiopetQScript w
val seqCount = new SageCountFastq(this)
seqCount.input = fastqInput
seqCount.output = countFile
seqCount.mainFunction = true
add(seqCount)
}
}
......@@ -211,7 +211,6 @@ class Gentrap(val root: Configurable) extends QScript
ribosomalRefFlat().foreach(job.intervalFile = _)
job.outputBam = createFile("cleaned.bam")
job.discardedBam = Some(createFile("rrna.bam"))
job.mainFunction = true
add(job)
Some(job.outputBam)
} else bamFile
......
......@@ -35,7 +35,6 @@ class BaseCounts(val root: Configurable) extends QScript with Measurement with A
baseCounter.outputDir = new File(outputDir, id)
baseCounter.prefix = id
baseCounter.refFlat = annotationRefFlat()
baseCounter.mainFunction = true
add(baseCounter)
id -> baseCounter
}
......
......@@ -33,7 +33,6 @@ trait CufflinksMeasurement extends QScript with Measurement {
val jobs = bamFiles.map {
case (id, file) =>
val cufflinks = makeCufflinksJob(id, file)
cufflinks.mainFunction = true
add(cufflinks)
id -> cufflinks
}
......
......@@ -41,7 +41,6 @@ class FragmentsPerGene(val root: Configurable) extends QScript with Measurement
sortSam.output = swapExt(outputDir, file, ".bam", ".idsorted.bam")
sortSam.sortOrder = "queryname"
sortSam.isIntermediate = true
sortSam.mainFunction = true
add(sortSam)
sortSam.output
} else file
......@@ -52,7 +51,6 @@ class FragmentsPerGene(val root: Configurable) extends QScript with Measurement
job.output = new File(outputDir, s"$id.$name.counts")
job.format = Option("bam")
job.order = if (sortOnId) Some("name") else Some("pos")
job.mainFunction = true
add(job)
id -> job
}
......
......@@ -200,7 +200,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
if (chunking) {
val fastSplitterR1 = new FastqSplitter(this)
fastSplitterR1.mainFunction = true
fastSplitterR1.input = inputR1
for ((chunkDir, fastqfile) <- chunks) fastSplitterR1.output :+= fastqfile._1
fastSplitterR1.isIntermediate = true
......@@ -208,7 +207,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
if (paired) {
val fastSplitterR2 = new FastqSplitter(this)
fastSplitterR2.mainFunction = true
fastSplitterR2.input = inputR2.get
for ((chunkDir, fastqfile) <- chunks) fastSplitterR2.output :+= fastqfile._2.get
fastSplitterR2.isIntermediate = true
......@@ -257,12 +255,10 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
if (!skipMarkduplicates) {
bamFile = new File(outputDir, outputName + ".dedup.bam")
val md = MarkDuplicates(this, bamFiles, bamFile)
md.mainFunction = true
add(md)
addSummarizable(md, "mark_duplicates")
} else if (skipMarkduplicates && chunking) {
val mergeSamFile = MergeSamFiles(this, bamFiles, new File(outputDir, outputName + ".merge.bam"))
mergeSamFile.mainFunction = true
add(mergeSamFile)
bamFile = mergeSamFile.output
}
......@@ -301,7 +297,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
bwaAlnR1.fastq = R1
bwaAlnR1.output = swapExt(output.getParent, output, ".bam", ".R1.sai")
bwaAlnR1.isIntermediate = true
bwaAlnR1.mainFunction = true
add(bwaAlnR1)
val samFile: File = if (paired) {
......@@ -309,7 +304,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
bwaAlnR2.fastq = R2.get
bwaAlnR2.output = swapExt(output.getParent, output, ".bam", ".R2.sai")
bwaAlnR2.isIntermediate = true
bwaAlnR2.mainFunction = true
add(bwaAlnR2)
val bwaSampe = new BwaSampe(this)
......@@ -320,7 +314,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
bwaSampe.r = getReadGroupBwa
bwaSampe.output = swapExt(output.getParent, output, ".bam", ".sam")
bwaSampe.isIntermediate = true
bwaSampe.mainFunction = true
add(bwaSampe)
bwaSampe.output
......@@ -331,7 +324,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
bwaSamse.r = getReadGroupBwa
bwaSamse.output = swapExt(output.getParent, output, ".bam", ".sam")
bwaSamse.isIntermediate = true
bwaSamse.mainFunction = true
add(bwaSamse)
bwaSamse.output
......@@ -339,7 +331,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val sortSam = SortSam(this, samFile, output)
if (chunking || !skipMarkduplicates) sortSam.isIntermediate = true
sortSam.mainFunction = true
add(sortSam)
sortSam.output
}
......@@ -355,7 +346,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val pipe = bwaCommand | sortSam
pipe.isIntermediate = chunking || !skipMarkduplicates
pipe.threadsCorrection = -1
pipe.mainFunction = true
add(pipe)
output
}
......@@ -372,7 +362,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val ar = addAddOrReplaceReadGroups(reorderSam.output, output)
val pipe = new BiopetFifoPipe(this, gsnapCommand :: ar._1 :: reorderSam :: Nil)
pipe.threadsCorrection = -2
pipe.mainFunction = true
add(pipe)
ar._2
}
......@@ -398,7 +387,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val pipe = hisat2 | sortSam
pipe.isIntermediate = chunking || !skipMarkduplicates
pipe.threadsCorrection = 1
pipe.mainFunction = true
add(pipe)
output
......@@ -414,7 +402,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
tophat.noConvertBam = false
// and always keep input ordering
tophat.keepFastaOrder = true
tophat.mainFunction = true
add(tophat)
// fix unmapped file coordinates
......@@ -429,7 +416,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val sorter = SortSam(this, fixer.outputSam, new File(tophat.outputDir, "unmapped_fixup.sorted.bam"))
sorter.sortOrder = "coordinate"
sorter.isIntermediate = true
sorter.mainFunction
add(sorter)
// merge with mapped file
......@@ -470,11 +456,9 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
stampyCmd.sanger = true
stampyCmd.output = this.swapExt(output.getParentFile, output, ".bam", ".sam")
stampyCmd.isIntermediate = true
stampyCmd.mainFunction = true
add(stampyCmd)
val sortSam = SortSam(this, stampyCmd.output, output)
if (chunking || !skipMarkduplicates) sortSam.isIntermediate = true
sortSam.mainFunction = true
add(sortSam)
sortSam.output
}
......@@ -493,7 +477,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val ar = addAddOrReplaceReadGroups(bowtie.output, output)
val pipe = new BiopetFifoPipe(this, (Some(bowtie) :: Some(ar._1) :: Nil).flatten)
pipe.threadsCorrection = -1
pipe.mainFunction = true
add(pipe)
ar._2
}
......@@ -513,7 +496,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
val pipe = bowtie2 | sortSam
pipe.isIntermediate = chunking || !skipMarkduplicates
pipe.threadsCorrection = -1
pipe.mainFunction = true
add(pipe)
output
}
......@@ -534,7 +516,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
pipe.threadsCorrection = -3
zcatR1._1.foreach(x => pipe.threadsCorrection -= 1)
zcatR2.foreach(_._1.foreach(x => pipe.threadsCorrection -= 1))
pipe.mainFunction = true
add(pipe)
reorderSam.output
}
......@@ -547,7 +528,6 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
zcatR2.foreach(_._1.foreach(add(_)))
val starCommand = Star._2pass(this, zcatR1._2, zcatR2.map(_._2), outputDir, isIntermediate = true)
starCommand._2.foreach(_.mainFunction = true)
addAll(starCommand._2)
val ar = addAddOrReplaceReadGroups(starCommand._1, output)
add(ar._1)
......
......@@ -217,12 +217,10 @@ class Shiva(val root: Configurable) extends QScript with MultisampleMappingTrait
def addIndelRealign(inputBam: File, dir: File, isIntermediate: Boolean): File = {
val realignerTargetCreator = RealignerTargetCreator(this, inputBam, dir)
realignerTargetCreator.isIntermediate = true
realignerTargetCreator.mainFunction = true
add(realignerTargetCreator)
val indelRealigner = IndelRealigner(this, inputBam, realignerTargetCreator.out, dir)
indelRealigner.isIntermediate = isIntermediate
indelRealigner.mainFunction = true
add(indelRealigner)
indelRealigner.out
......@@ -233,7 +231,6 @@ class Shiva(val root: Configurable) extends QScript with MultisampleMappingTrait
val baseRecalibrator = BaseRecalibrator(this, inputBam, swapExt(dir, inputBam, ".bam", ".baserecal"))
if (baseRecalibrator.knownSites.isEmpty) return inputBam
baseRecalibrator.mainFunction = true
add(baseRecalibrator)
if (config("use_analyze_covariates", default = true).asBoolean) {
......@@ -247,7 +244,6 @@ class Shiva(val root: Configurable) extends QScript with MultisampleMappingTrait
val printReads = PrintReads(this, inputBam, swapExt(dir, inputBam, ".bam", ".baserecal.bam"))
printReads.BQSR = Some(baseRecalibrator.out)
printReads.isIntermediate = isIntermediate
printReads.mainFunction = true
add(printReads)
printReads.out
......
......@@ -76,7 +76,6 @@ class ShivaSvCalling(val root: Configurable) extends QScript with SummaryQScript
val mergeSVcalls = new Pysvtools(this)
mergeSVcalls.input = sampleVCFS.flatten
mergeSVcalls.output = new File(outputDir, sample + ".merged.vcf")
mergeSVcalls.mainFunction = true
add(mergeSVcalls)
outputMergedVCFbySample += (sample -> mergeSVcalls.output)
}
......@@ -85,7 +84,6 @@ class ShivaSvCalling(val root: Configurable) extends QScript with SummaryQScript
val mergeSVcallsProject = new Pysvtools(this)
mergeSVcallsProject.input = outputMergedVCFbySample.values.toList
mergeSVcallsProject.output = outputMergedVCF
mergeSVcallsProject.mainFunction = true
add(mergeSVcallsProject)
// merging the VCF calls by project
......
......@@ -106,27 +106,22 @@ class ShivaVariantcalling(val root: Configurable) extends QScript
if (normalize && decompose) {
vtNormalize.outputVcf = swapExt(caller.outputDir, caller.outputFile, ".vcf.gz", ".normalized.vcf.gz")
vtNormalize.isIntermediate = true
vtNormalize.mainFunction = true
add(vtNormalize, Tabix(this, vtNormalize.outputVcf))
vtDecompose.inputVcf = vtNormalize.outputVcf
vtDecompose.outputVcf = swapExt(caller.outputDir, vtNormalize.outputVcf, ".vcf.gz", ".decompose.vcf.gz")
vtDecompose.mainFunction = true
add(vtDecompose, Tabix(this, vtDecompose.outputVcf))
cv.variant :+= TaggedFile(vtDecompose.outputVcf, caller.name)
} else if (normalize && !decompose) {
vtNormalize.outputVcf = swapExt(caller.outputDir, caller.outputFile, ".vcf.gz", ".normalized.vcf.gz")
vtNormalize.mainFunction = true
add(vtNormalize, Tabix(this, vtNormalize.outputVcf))
cv.variant :+= TaggedFile(vtNormalize.outputVcf, caller.name)
} else if (!normalize && decompose) {
vtDecompose.inputVcf = caller.outputFile
vtDecompose.outputVcf = swapExt(caller.outputDir, caller.outputFile, ".vcf.gz", ".decompose.vcf.gz")
vtDecompose.mainFunction = true
add(vtDecompose, Tabix(this, vtDecompose.outputVcf))
cv.variant :+= TaggedFile(vtDecompose.outputVcf, caller.name)
} else cv.variant :+= TaggedFile(caller.outputFile, caller.name)
}
cv.mainFunction = true
add(cv)
addStats(finalFile, "final")
......
......@@ -30,17 +30,13 @@ class Breakdancer(val root: Configurable) extends SvCaller {
logger.debug("Starting Breakdancer configuration")
val bdcfg = BreakdancerConfig(this, bamFile, new File(breakdancerSampleDir, sample + ".breakdancer.cfg"))
bdcfg.mainFunction = true
val breakdancer = BreakdancerCaller(this, bdcfg.output, new File(breakdancerSampleDir, sample + ".breakdancer.tsv"))
breakdancer.mainFunction = true
val bdvcf = BreakdancerVCF(this, breakdancer.output, new File(breakdancerSampleDir, sample + ".breakdancer.vcf"),
sample = sample + sampleNameSuffix)
bdvcf.mainFunction = true
val compressedVCF = new SortVcf(this)
compressedVCF.input = bdvcf.output
compressedVCF.output = new File(breakdancerSampleDir, s"${sample}.breakdancer.vcf.gz")
compressedVCF.mainFunction = true
add(bdcfg, breakdancer, bdvcf, compressedVCF)
......
......@@ -27,7 +27,6 @@ class Clever(val root: Configurable) extends SvCaller {
for ((sample, bamFile) <- inputBams) {
val cleverDir = new File(outputDir, sample)
val clever = CleverCaller(this, bamFile, cleverDir)
clever.mainFunction = true
add(clever)
val cleverVCF = new CleverFixVCF(this)
......@@ -35,13 +34,11 @@ class Clever(val root: Configurable) extends SvCaller {
cleverVCF.output = new File(cleverDir, s".${sample}.clever.vcf")
cleverVCF.sampleName = sample + sampleNameSuffix
cleverVCF.isIntermediate = true
cleverVCF.mainFunction = true
add(cleverVCF)
val compressedVCF = new SortVcf(this)
compressedVCF.input = cleverVCF.output
compressedVCF.output = new File(cleverDir, s"${sample}.clever.vcf.gz")
compressedVCF.mainFunction = true
add(compressedVCF)
addVCF(sample, compressedVCF.output)
......
......@@ -43,7 +43,6 @@ class Delly(val root: Configurable) extends SvCaller {
delly.analysistype = "DEL"
delly.isIntermediate = true
delly.outputvcf = new File(dellyDir, sample + ".delly.del.vcf")
delly.mainFunction = true
add(delly)
catVariants.variant :+= delly.outputvcf
......@@ -54,7 +53,6 @@ class Delly(val root: Configurable) extends SvCaller {
delly.analysistype = "DUP"
delly.isIntermediate = true
delly.outputvcf = new File(dellyDir, sample + ".delly.dup.vcf")
delly.mainFunction = true
add(delly)
catVariants.variant :+= delly.outputvcf
......@@ -65,7 +63,6 @@ class Delly(val root: Configurable) extends SvCaller {
delly.analysistype = "INV"
delly.isIntermediate = true
delly.outputvcf = new File(dellyDir, sample + ".delly.inv.vcf")
delly.mainFunction = true
add(delly)
catVariants.variant :+= delly.outputvcf
......@@ -76,7 +73,6 @@ class Delly(val root: Configurable) extends SvCaller {
delly.analysistype = "TRA"
delly.isIntermediate = true
delly.outputvcf = new File(dellyDir, sample + ".delly.tra.vcf")
delly.mainFunction = true
add(delly)
catVariants.variant :+= delly.outputvcf
......@@ -84,13 +80,11 @@ class Delly(val root: Configurable) extends SvCaller {
if (catVariants.variant.isEmpty) Logging.addError("At least 1 SV-type should be selected for Delly")
catVariants.mainFunction = true
add(catVariants)
val compressedVCF = new SortVcf(this)
compressedVCF.input = catVariants.outputFile
compressedVCF.output = catVariants.outputFile + ".gz"
compressedVCF.mainFunction = true
add(compressedVCF)
addVCF(sample, compressedVCF.output)
......
......@@ -41,11 +41,9 @@ class Pindel(val root: Configurable) extends SvCaller {
cfg.input = bamFile
cfg.sampleName = sample + sampleNameSuffix
cfg.output = configFile
cfg.mainFunction = true
add(cfg)
val pindel = PindelCaller(this, cfg.output, pindelDir)
pindel.mainFunction = true
add(pindel)
// Current date
......@@ -58,13 +56,11 @@ class Pindel(val root: Configurable) extends SvCaller {
pindelVcf.pindelOutputRoot = Some(new File(pindelDir, "sample"))
pindelVcf.rDate = todayformat.format(today) // officially, we should enter the date of the genome here
pindelVcf.outputVCF = new File(pindelDir, s"${sample}.pindel.vcf")
pindelVcf.mainFunction = true
add(pindelVcf)
val compressedVCF = new SortVcf(this)
compressedVCF.input = pindelVcf.outputVCF
compressedVCF.output = new File(pindelDir, s"${sample}.pindel.vcf.gz")
compressedVCF.mainFunction = true
add(compressedVCF)
addVCF(sample, compressedVCF.output)
......
......@@ -37,7 +37,6 @@ class Bcftools(val root: Configurable) extends Variantcaller {
bt.c = true
val pipe = mp | new FixMpileup(this) | bt > outputFile
pipe.mainFunction = true
add(pipe)
add(Tabix(this, outputFile))
}
......
......@@ -39,7 +39,6 @@ class BcftoolsSingleSample(val root: Configurable) extends Variantcaller {
bt.output = new File(outputDir, sample + ".vcf.gz")
val pipe = mp | new FixMpileup(this) | bt
pipe.mainFunction = true
add(pipe)
add(Tabix(this, bt.output))
bt.output
......@@ -50,7 +49,6 @@ class BcftoolsSingleSample(val root: Configurable) extends Variantcaller {
bcfmerge.input = sampleVcfs
bcfmerge.output = outputFile
bcfmerge.O = Some("z")
bcfmerge.mainFunction = true
add(bcfmerge)
} else add(Ln.apply(this, sampleVcfs.head, outputFile))
add(Tabix(this, outputFile))
......
......@@ -29,7 +29,6 @@ class Freebayes(val root: Configurable) extends Variantcaller {
fb.bamfiles = inputBams.values.toList
fb.outputVcf = new File(outputDir, namePrefix + ".freebayes.vcf")
val pipe = fb | new Bgzip(this) > outputFile
pipe.mainFunction = true
add(pipe)
add(Tabix.apply(this, outputFile))
......
......@@ -29,7 +29,6 @@ class HaplotypeCaller(val root: Configurable) extends Variantcaller {
def biopetScript() {
val hc = gatk.HaplotypeCaller(this, inputBams.values.toList, outputFile)
hc.mainFunction = true
add(hc)
}
}
......
......@@ -34,7 +34,6 @@ class HaplotypeCallerAllele(val root: Configurable) extends Variantcaller {
val hc = gatk.HaplotypeCaller(this, inputBams.values.toList, outputFile)
hc.alleles = Some(alleles)
hc.genotyping_mode = Some("GENOTYPE_GIVEN_ALLELES")
hc.mainFunction = true
add(hc)
}
}