Commit 6aa0a3d3 authored by Wai Yi Leung

Merge branch 'feature-improved_bammetrics' into 'develop'

Feature improved bammetrics

see #127

See merge request !158
parents da6c129f b810fba2
@@ -39,5 +39,17 @@
<artifactId>BiopetFramework</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_2.11</artifactId>
<version>2.2.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>
@@ -18,12 +18,12 @@ package nl.lumc.sasc.biopet.pipelines.bammetrics
import nl.lumc.sasc.biopet.core.summary.SummaryQScript
import nl.lumc.sasc.biopet.scripts.CoverageStats
import org.broadinstitute.gatk.queue.QScript
import nl.lumc.sasc.biopet.core.{ SampleLibraryTag, BiopetQScript, PipelineCommand }
import nl.lumc.sasc.biopet.core.{ SampleLibraryTag, PipelineCommand }
import java.io.File
import nl.lumc.sasc.biopet.tools.{ BedToInterval, BiopetFlagstat }
import nl.lumc.sasc.biopet.tools.BiopetFlagstat
import nl.lumc.sasc.biopet.core.config.Configurable
import nl.lumc.sasc.biopet.extensions.bedtools.{ BedtoolsCoverage, BedtoolsIntersect }
import nl.lumc.sasc.biopet.extensions.picard.{ CollectInsertSizeMetrics, CollectGcBiasMetrics, CalculateHsMetrics, CollectAlignmentSummaryMetrics }
import nl.lumc.sasc.biopet.extensions.picard._
import nl.lumc.sasc.biopet.extensions.samtools.SamtoolsFlagstat
class BamMetrics(val root: Configurable) extends QScript with SummaryQScript with SampleLibraryTag {
@@ -32,14 +32,13 @@ class BamMetrics(val root: Configurable) extends QScript with SummaryQScript wit
@Input(doc = "Bam File", shortName = "BAM", required = true)
var inputBam: File = _
@Input(doc = "Bed tracks targets", shortName = "target", required = false)
var bedFiles: List[File] = Nil
/** Bed files for regions of interest */
var roiBedFiles: List[File] = config("regions_of_interest", Nil)
@Input(doc = "Bed tracks bait", shortName = "bait", required = false)
var baitBedFile: File = _
/** Bed file of the amplicon that is used */
var ampliconBedFile: Option[File] = config("amplicon_bed")
@Argument(doc = "", required = false)
var wholeGenome = false
var rnaMetrics: Boolean = config("rna_metrcis", default = false)
/** return location of summary file */
def summaryFile = (sampleId, libId) match {
@@ -49,19 +48,16 @@ class BamMetrics(val root: Configurable) extends QScript with SummaryQScript wit
}
/** returns files to store in summary */
def summaryFiles = Map("input_bam" -> inputBam)
def summaryFiles = Map("input_bam" -> inputBam) ++
ampliconBedFile.map("amplicon" -> _).toMap ++
roiBedFiles.map(x => "roi_" + x.getName.stripSuffix(".bed") -> x).toMap
/** return settings */
def summarySettings = Map()
def summarySettings = Map("amplicon_name" -> ampliconBedFile.collect { case x => x.getName.stripSuffix(".bed") },
"roi_name" -> roiBedFiles.map(_.getName.stripSuffix(".bed")))
/** executed before script */
def init() {
if (config.contains("target_bed")) {
for (file <- config("target_bed").asList) {
bedFiles +:= new File(file.toString)
}
}
if (baitBedFile == null && config.contains("target_bait")) baitBedFile = config("target_bait")
}
/** Script to add jobs */
@@ -72,42 +68,96 @@ class BamMetrics(val root: Configurable) extends QScript with SummaryQScript wit
add(biopetFlagstat)
addSummarizable(biopetFlagstat, "biopet_flagstat")
add(CollectGcBiasMetrics(this, inputBam, outputDir))
val collectInsertSizeMetrics = CollectInsertSizeMetrics(this, inputBam, outputDir)
add(collectInsertSizeMetrics)
addSummarizable(collectInsertSizeMetrics, "insert_size_metrics")
val collectAlignmentSummaryMetrics = CollectAlignmentSummaryMetrics(this, inputBam, outputDir)
add(collectAlignmentSummaryMetrics)
addSummarizable(collectAlignmentSummaryMetrics, "alignment_metrics")
val baitIntervalFile = if (baitBedFile != null) new File(outputDir, baitBedFile.getName.stripSuffix(".bed") + ".interval") else null
if (baitIntervalFile != null)
add(BedToInterval(this, baitBedFile, inputBam, outputDir), true)
for (bedFile <- bedFiles) {
//TODO: Add target jobs to summary
val targetDir = new File(outputDir, bedFile.getName.stripSuffix(".bed"))
val targetFile = new File(targetDir, bedFile.getName.stripSuffix(".bed") + ".interval")
val targetInterval = BedToInterval(this, bedFile, inputBam, targetFile)
add(targetInterval, true)
add(CalculateHsMetrics(this, inputBam, if (baitIntervalFile != null) baitIntervalFile
else targetInterval.output, targetInterval.output, targetDir))
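// CollectMultipleMetrics bundles several Picard collectors (e.g. alignment summary and insert size) into a single pass over the BAM, replacing the separate jobs above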
val multiMetrics = new CollectMultipleMetrics(this)
multiMetrics.input = inputBam
multiMetrics.outputName = new File(outputDir, inputBam.getName.stripSuffix(".bam"))
add(multiMetrics)
addSummarizable(multiMetrics, "multi_metrics")
val gcBiasMetrics = CollectGcBiasMetrics(this, inputBam, outputDir)
add(gcBiasMetrics)
addSummarizable(gcBiasMetrics, "gc_bias")
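// Whole-genome coverage statistics via Picard CollectWgsMetrics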
val wgsMetrics = new CollectWgsMetrics(this)
wgsMetrics.input = inputBam
wgsMetrics.output = swapExt(outputDir, inputBam, ".bam", ".wgs.metrics")
add(wgsMetrics)
addSummarizable(wgsMetrics, "wgs")
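// Picard CollectRnaSeqMetrics is only added when the rna metrics config flag is set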
if (rnaMetrics) {
val rnaMetrics = new CollectRnaSeqMetrics(this)
rnaMetrics.input = inputBam
rnaMetrics.output = swapExt(outputDir, inputBam, ".bam", ".rna.metrics")
rnaMetrics.chartOutput = Some(swapExt(outputDir, inputBam, ".bam", ".rna.metrics.pdf"))
add(rnaMetrics)
addSummarizable(rnaMetrics, "rna")
}
val strictOutputBam = new File(targetDir, inputBam.getName.stripSuffix(".bam") + ".overlap.strict.bam")
add(BedtoolsIntersect(this, inputBam, bedFile, strictOutputBam, minOverlap = config("strictintersectoverlap", default = 1.0)), true)
add(SamtoolsFlagstat(this, strictOutputBam, targetDir))
add(BiopetFlagstat(this, strictOutputBam, targetDir))
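// Helper that pairs a bed file with the interval list generated from it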
case class Intervals(bed: File, intervals: File)
// Create temporary jobs to convert bed files to interval lists
val roiIntervals = roiBedFiles.map(roiBedFile => {
val roiIntervals = swapExt(outputDir, roiBedFile, ".bed", ".intervals")
val ampBedToInterval = BedToIntervalList(this, roiBedFile, roiIntervals)
ampBedToInterval.isIntermediate = true
add(ampBedToInterval)
Intervals(roiBedFile, roiIntervals)
})
// Metrics that require an amplicon bed file
val ampIntervals = ampliconBedFile.collect {
case ampliconBedFile => {
val ampIntervals = swapExt(outputDir, ampliconBedFile, ".bed", ".intervals")
val ampBedToInterval = BedToIntervalList(this, ampliconBedFile, ampIntervals)
ampBedToInterval.isIntermediate = true
add(ampBedToInterval)
val chsMetrics = CalculateHsMetrics(this, inputBam,
List(ampIntervals), ampIntervals :: roiIntervals.map(_.intervals), outputDir)
add(chsMetrics)
addSummarizable(chsMetrics, "hs_metrics")
val pcrMetrics = CollectTargetedPcrMetrics(this, inputBam,
ampIntervals, ampIntervals :: roiIntervals.map(_.intervals), outputDir)
add(pcrMetrics)
addSummarizable(chsMetrics, "targeted_pcr_metrics")
Intervals(ampliconBedFile, ampIntervals)
}
}
val looseOutputBam = new File(targetDir, inputBam.getName.stripSuffix(".bam") + ".overlap.loose.bam")
add(BedtoolsIntersect(this, inputBam, bedFile, looseOutputBam, minOverlap = config("looseintersectoverlap", default = 0.01)), true)
add(SamtoolsFlagstat(this, looseOutputBam, targetDir))
add(BiopetFlagstat(this, looseOutputBam, targetDir))
// Create stats and coverage plot for each bed/interval file
for (intervals <- roiIntervals ++ ampIntervals) {
val targetName = intervals.bed.getName.stripSuffix(".bed")
val targetDir = new File(outputDir, targetName)
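// Strict intersect: with the default overlap fraction of 1.0 a read must be fully contained in the region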
val biStrict = BedtoolsIntersect(this, inputBam, intervals.bed,
output = new File(targetDir, inputBam.getName.stripSuffix(".bam") + ".overlap.strict.bam"),
minOverlap = config("strict_intersect_overlap", default = 1.0))
biStrict.isIntermediate = true
add(biStrict)
add(SamtoolsFlagstat(this, biStrict.output, targetDir))
val biopetFlagstatStrict = BiopetFlagstat(this, biStrict.output, targetDir)
add(biopetFlagstatStrict)
addSummarizable(biopetFlagstatStrict, targetName + "_biopet_flagstat_strict")
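// Loose intersect: with the default overlap fraction of 0.01 a minimal overlap with the region is enough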
val biLoose = BedtoolsIntersect(this, inputBam, intervals.bed,
output = new File(targetDir, inputBam.getName.stripSuffix(".bam") + ".overlap.loose.bam"),
minOverlap = config("loose_intersect_overlap", default = 0.01))
biLoose.isIntermediate = true
add(biLoose)
add(SamtoolsFlagstat(this, biLoose.output, targetDir))
val biopetFlagstatLoose = BiopetFlagstat(this, biLoose.output, targetDir)
add(biopetFlagstatLoose)
addSummarizable(biopetFlagstatLoose, targetName + "_biopet_flagstat_loose")
val coverageFile = new File(targetDir, inputBam.getName.stripSuffix(".bam") + ".coverage")
add(BedtoolsCoverage(this, inputBam, bedFile, coverageFile, true), true)
add(CoverageStats(this, coverageFile, targetDir))
// FIXME: should use piping
add(BedtoolsCoverage(this, inputBam, intervals.bed, coverageFile, depth = true), true)
val covStats = CoverageStats(this, coverageFile, targetDir)
add(covStats)
addSummarizable(covStats, "cov_stats")
}
addSummaryJobs
@@ -115,20 +165,14 @@ class BamMetrics(val root: Configurable) extends QScript with SummaryQScript wit
}
object BamMetrics extends PipelineCommand {
/**
* Make default implementation of BamMetrics
* @param root
* @param bamFile
* @param outputDir
* @return
*/
/** Makes a default implementation of BamMetrics and runs its script */
def apply(root: Configurable, bamFile: File, outputDir: File): BamMetrics = {
val bamMetrics = new BamMetrics(root)
bamMetrics.inputBam = bamFile
bamMetrics.outputDir = outputDir
bamMetrics.init
bamMetrics.biopetScript
return bamMetrics
bamMetrics.init()
bamMetrics.biopetScript()
bamMetrics
}
}
package nl.lumc.sasc.biopet.pipelines.bammetrics
import java.io.File
import com.google.common.io.Files
import nl.lumc.sasc.biopet.core.config.Config
import nl.lumc.sasc.biopet.extensions.bedtools.{ BedtoolsCoverage, BedtoolsIntersect }
import nl.lumc.sasc.biopet.extensions.picard._
import nl.lumc.sasc.biopet.extensions.samtools.SamtoolsFlagstat
import nl.lumc.sasc.biopet.scripts.CoverageStats
import nl.lumc.sasc.biopet.tools.BiopetFlagstat
import nl.lumc.sasc.biopet.utils.ConfigUtils
import org.apache.commons.io.FileUtils
import org.broadinstitute.gatk.queue.QSettings
import org.scalatest.Matchers
import org.scalatest.testng.TestNGSuite
import org.testng.annotations.{ Test, DataProvider, AfterClass }
/**
* Created by pjvan_thof on 4/30/15.
*/
class BamMetricsTest extends TestNGSuite with Matchers {
def initPipeline(map: Map[String, Any]): BamMetrics = {
new BamMetrics() {
override def configName = "bammetrics"
override def globalConfig = new Config(map)
qSettings = new QSettings
qSettings.runName = "test"
}
}
@DataProvider(name = "bammetricsOptions")
def bammetricsOptions = {
val rois = Array(0, 1, 2, 3)
val amplicon = Array(true, false)
val rna = Array(true, false)
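// Yield every combination of ROI count, amplicon flag and RNA flag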
for (
rois <- rois;
amplicon <- amplicon;
rna <- rna
) yield Array(rois, amplicon, rna)
}
@Test(dataProvider = "bammetricsOptions")
def testBamMetrics(rois: Int, amplicon: Boolean, rna: Boolean) = {
val map = ConfigUtils.mergeMaps(Map("output_dir" -> BamMetricsTest.outputDir, "rna_metrcis" -> rna
), Map(BamMetricsTest.executables.toSeq: _*)) ++
(if (amplicon) Map("amplicon_bed" -> "amplicon.bed") else Map()) ++
Map("regions_of_interest" -> (1 to rois).map("roi_" + _ + ".bed").toList)
val bammetrics: BamMetrics = initPipeline(map)
bammetrics.inputBam = new File("input.bam")
bammetrics.sampleId = Some("1")
bammetrics.libId = Some("1")
bammetrics.script()
var regions: Int = rois + (if (amplicon) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[CollectRnaSeqMetrics]) shouldBe (if (rna) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[CollectWgsMetrics]) shouldBe 1
bammetrics.functions.count(_.isInstanceOf[CollectMultipleMetrics]) shouldBe 1
bammetrics.functions.count(_.isInstanceOf[CalculateHsMetrics]) shouldBe (if (amplicon) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[CollectTargetedPcrMetrics]) shouldBe (if (amplicon) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[BiopetFlagstat]) shouldBe (1 + (regions * 2))
bammetrics.functions.count(_.isInstanceOf[SamtoolsFlagstat]) shouldBe (1 + (regions * 2))
bammetrics.functions.count(_.isInstanceOf[BedtoolsIntersect]) shouldBe (regions * 2)
bammetrics.functions.count(_.isInstanceOf[BedtoolsCoverage]) shouldBe regions
bammetrics.functions.count(_.isInstanceOf[CoverageStats]) shouldBe regions
}
// Remove the temporary run directory after all tests in the class have been run
@AfterClass
def removeTempOutputDir() = {
FileUtils.deleteDirectory(BamMetricsTest.outputDir)
}
}
object BamMetricsTest {
val outputDir = Files.createTempDir()
val executables = Map(
"refFlat" -> "bla.refFlat",
"reference" -> "reference.fa",
"samtools" -> Map("exe" -> "test"),
"bedtools" -> Map("exe" -> "test")
)
}
@@ -115,7 +115,7 @@ class WriteSummary(val root: Configurable) extends InProcessFunction with Config
}).foldRight(jobsMap)((a, b) => ConfigUtils.mergeMaps(a, b))
val writer = new PrintWriter(out)
writer.println(ConfigUtils.mapToJson(combinedMap).spaces4)
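// .nospaces renders the JSON without indentation, keeping the summary file compact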
writer.println(ConfigUtils.mapToJson(combinedMap).nospaces)
writer.close()
}
......
@@ -55,12 +55,12 @@ class BedtoolsIntersect(val root: Configurable) extends Bedtools {
object BedtoolsIntersect {
/** Returns default bedtools intersect */
def apply(root: Configurable, input: File, intersect: File, output: File,
minOverlap: Double = 0, count: Boolean = false): BedtoolsIntersect = {
minOverlap: Option[Double] = None, count: Boolean = false): BedtoolsIntersect = {
val bedtoolsIntersect = new BedtoolsIntersect(root)
bedtoolsIntersect.input = input
bedtoolsIntersect.intersectFile = intersect
bedtoolsIntersect.output = output
if (minOverlap > 0) bedtoolsIntersect.minOverlap = Option(minOverlap)
bedtoolsIntersect.minOverlap = minOverlap
bedtoolsIntersect.count = count
return bedtoolsIntersect
}
......
@@ -21,7 +21,7 @@ import org.broadinstitute.gatk.utils.commandline.{ Input, Output, Argument }
/** Extension for picard AddOrReplaceReadGroups */
class AddOrReplaceReadGroups(val root: Configurable) extends Picard {
javaMainClass = "picard.sam.AddOrReplaceReadGroups"
javaMainClass = new picard.sam.AddOrReplaceReadGroups().getClass.getName
@Input(doc = "The input SAM or BAM files to analyze.", required = true)
var input: File = _
......
package nl.lumc.sasc.biopet.extensions.picard
import java.io.File
import nl.lumc.sasc.biopet.core.config.Configurable
import org.broadinstitute.gatk.utils.commandline.{ Output, Input }
/**
* Created by pjvan_thof on 4/15/15.
*/
class BedToIntervalList(val root: Configurable) extends Picard {
javaMainClass = new picard.util.BedToIntervalList().getClass.getName
@Input(doc = "Input bed file", required = true)
var input: File = null
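// The sequence dictionary is expected next to the reference fasta, with a .dict extension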
@Input(doc = "Reference dict file", required = true)
var dict: File = new File(config("reference").asString.stripSuffix(".fa").stripSuffix(".fasta") + ".dict")
@Output(doc = "Output interval list", required = true)
var output: File = null
override def commandLine = super.commandLine +
required("SEQUENCE_DICTIONARY=", dict, spaceSeparated = false) +
required("INPUT=", input, spaceSeparated = false) +
required("OUTPUT=", output, spaceSeparated = false)
}
object BedToIntervalList {
def apply(root: Configurable, input: File, output: File): BedToIntervalList = {
val bi = new BedToIntervalList(root)
bi.input = input
bi.output = output
bi
}
}
\ No newline at end of file
@@ -17,20 +17,21 @@ package nl.lumc.sasc.biopet.extensions.picard
import java.io.File
import nl.lumc.sasc.biopet.core.config.Configurable
import nl.lumc.sasc.biopet.core.summary.Summarizable
import org.broadinstitute.gatk.utils.commandline.{ Input, Output, Argument }
/** Extension for picard CalculateHsMetrics */
class CalculateHsMetrics(val root: Configurable) extends Picard {
javaMainClass = "picard.analysis.directed.CalculateHsMetrics"
class CalculateHsMetrics(val root: Configurable) extends Picard with Summarizable {
javaMainClass = new picard.analysis.directed.CalculateHsMetrics().getClass.getName
@Input(doc = "The input SAM or BAM files to analyze. Must be coordinate sorted.", required = true)
var input: File = _
@Input(doc = "BAIT_INTERVALS", required = true)
var baitIntervals: File = _
var baitIntervals: List[File] = _
@Input(doc = "TARGET_INTERVALS", required = true)
var targetIntervals: File = _
var targetIntervals: List[File] = Nil
@Output(doc = "The output file to write statistics to", required = true)
var output: File = _
@@ -53,21 +54,31 @@ class CalculateHsMetrics(val root: Configurable) extends Picard {
required("OUTPUT=", output, spaceSeparated = false) +
optional("REFERENCE_SEQUENCE=", reference, spaceSeparated = false) +
repeat("METRIC_ACCUMULATION_LEVEL=", metricAccumulationLevel, spaceSeparated = false) +
required("BAIT_INTERVALS=", baitIntervals, spaceSeparated = false) +
required("TARGET_INTERVALS=", targetIntervals, spaceSeparated = false) +
repeat("BAIT_INTERVALS=", baitIntervals, spaceSeparated = false) +
repeat("TARGET_INTERVALS=", targetIntervals, spaceSeparated = false) +
optional("PER_TARGET_COVERAGE=", perTargetCoverage, spaceSeparated = false) +
optional("BAIT_SET_NAME=", baitSetName, spaceSeparated = false)
/** Returns files for summary */
def summaryFiles: Map[String, File] = Map()
/** Returns stats for summary */
def summaryStats: Map[String, Any] = Picard.getMetrics(output).getOrElse(Map())
}
object CalculateHsMetrics {
/** Returns default CalculateHsMetrics */
def apply(root: Configurable, input: File, baitIntervals: File, targetIntervals: File, outputDir: File): CalculateHsMetrics = {
def apply(root: Configurable,
input: File,
baitIntervals: List[File],
targetIntervals: List[File],
outputDir: File): CalculateHsMetrics = {
val calculateHsMetrics = new CalculateHsMetrics(root)
calculateHsMetrics.input = input
calculateHsMetrics.baitIntervals = baitIntervals
calculateHsMetrics.targetIntervals = targetIntervals
calculateHsMetrics.output = new File(outputDir, input.getName.stripSuffix(".bam") + ".capmetrics")
calculateHsMetrics.perTargetCoverage = new File(outputDir, input.getName.stripSuffix(".bam") + ".per_target_coverage")
return calculateHsMetrics
calculateHsMetrics.output = new File(outputDir, input.getName.stripSuffix(".bam") + ".HsMetrics")
calculateHsMetrics.perTargetCoverage = new File(outputDir, input.getName.stripSuffix(".bam") + ".HsMetrics.per_target_coverage")
calculateHsMetrics
}
}
\ No newline at end of file
@@ -22,7 +22,7 @@ import org.broadinstitute.gatk.utils.commandline.{ Input, Output, Argument }
/** Extension for picard CollectAlignmentSummaryMetrics */
class CollectAlignmentSummaryMetrics(val root: Configurable) extends Picard with Summarizable {
javaMainClass = "picard.analysis.CollectAlignmentSummaryMetrics"
javaMainClass = new picard.analysis.CollectAlignmentSummaryMetrics().getClass.getName
@Input(doc = "The input SAM or BAM files to analyze. Must be coordinate sorted.", required = true)
var input: File = _
@@ -66,19 +66,7 @@ class CollectAlignmentSummaryMetrics(val root: Configurable) extends Picard with
def summaryFiles: Map[String, File] = Map()
/** Returns stats for summary */
def summaryStats: Map[String, Any] = Picard.getMetrics(output) match {
case None => Map()
case Some((header, content)) =>
(for (category <- 0 until content.size) yield {
content(category)(0).toString -> (
for (
i <- 1 until header.size if i < content(category).size
) yield {
header(i).toLowerCase -> content(category)(i)
}).toMap
}
).toMap
}
def summaryStats: Map[String, Any] = Picard.getMetrics(output).getOrElse(Map())
}
object CollectAlignmentSummaryMetrics {
......
@@ -17,11 +17,12 @@ package nl.lumc.sasc.biopet.extensions.picard
import java.io.File
import nl.lumc.sasc.biopet.core.config.Configurable
import nl.lumc.sasc.biopet.core.summary.Summarizable
import org.broadinstitute.gatk.utils.commandline.{ Input, Output, Argument }
/** Extension for picard CollectGcBiasMetrics */
class CollectGcBiasMetrics(val root: Configurable) extends Picard {
javaMainClass = "picard.analysis.CollectGcBiasMetrics"
class CollectGcBiasMetrics(val root: Configurable) extends Picard with Summarizable {
javaMainClass = new picard.analysis.CollectGcBiasMetrics().getClass.getName
@Input(doc = "The input SAM or BAM files to analyze. Must be coordinate sorted.", required = true)
var input: Seq[File] = Nil
@@ -65,6 +66,12 @@ class CollectGcBiasMetrics(val root: Configurable) extends Picard {
optional("MINIMUM_GENOME_FRACTION=", minGenomeFraction, spaceSeparated = false) +
conditional(assumeSorted, "ASSUME_SORTED=TRUE") +
conditional(isBisulfiteSequinced.getOrElse(false), "IS_BISULFITE_SEQUENCED=TRUE")
/** Returns files for summary */
def summaryFiles: Map[String, File] = Map()
/** Returns stats for summary */
def summaryStats: Map[String, Any] = Picard.getMetrics(output).getOrElse(Map())
}
object CollectGcBiasMetrics {
......
@@ -24,7 +24,7 @@ import scala.collection.immutable.Nil
/** Extension for picard CollectInsertSizeMetrics */
class CollectInsertSizeMetrics(val root: Configurable) extends Picard with Summarizable {
javaMainClass = "picard.analysis.CollectInsertSizeMetrics"
javaMainClass = new picard.analysis.CollectInsertSizeMetrics().getClass.getName
@Input(doc = "The input SAM or BAM files to analyze. Must be coordinate sorted.", required = true)
var input: File = null
@@ -75,13 +75,7 @@ class CollectInsertSizeMetrics(val root: Configurable) extends Picard with Summa
/** Returns files for summary */
def summaryFiles: Map[String, File] = Map("output_histogram" -> outputHistogram)
def summaryStats: Map[String, Any] = Picard.getMetrics(output) match {
case None => Map()
case Some((header, content)) =>
(for (i <- 0 to header.size if i < content.head.size)
yield header(i).toLowerCase -> content.head(i)).toMap
}
def summaryStats: Map[String, Any] = Picard.getMetrics(output).getOrElse(Map())
}
object CollectInsertSizeMetrics {
@@ -90,6 +84,6 @@ object CollectInsertSizeMetrics {
val collectInsertSizeMetrics = new CollectInsertSizeMetrics(root)
collectInsertSizeMetrics.input = input