Commit 251846cb authored by Peter van 't Hof
Browse files

Merge branch 'develop' into bios-slurm

parents 4ece3248 2467bc10
// Jenkins scripted pipeline: build and test the project with Maven, then report
// the build status to Slack. Runs on an agent labelled 'local'.
node('local') {
try {
// Resolve the tool installations configured on the Jenkins controller so the
// named versions are available to later stages.
stage('Init') {
tool 'JDK 8u102'
tool 'Apache Maven 3.3.9'
}
stage('Checkout') {
checkout scm
// The repository uses git submodules; fetch them recursively.
sh 'git submodule update --init --recursive'
}
stage('Build and Test') {
withMaven(maven: 'Apache Maven 3.3.9', jdk: 'JDK 8u102') {
// -Dmaven.test.failure.ignore lets the build finish so all test reports are
// collected; the junit step below marks the build UNSTABLE on failures.
sh 'mvn -B -T 2 -Dmaven.test.failure.ignore clean package'
}
}
stage('Report tests') {
// NOTE(review): '*/target' matches only one directory level; for nested
// multi-module layouts '**/target/surefire-reports/*.xml' may be intended — confirm.
junit '*/target/surefire-reports/*.xml'
}
// Success (green) vs unstable/other (yellow) Slack notification.
if(currentBuild.result == null || "SUCCESS".equals(currentBuild.result)) {
currentBuild.result = "SUCCESS"
slackSend (color: '#00FF00', message: "${currentBuild.result}: Job '${env.JOB_NAME} #${env.BUILD_NUMBER}' (<${env.BUILD_URL}|Open>)", channel: '#biopet-bot', teamDomain: 'lumc', tokenCredentialId: 'lumc')
} else {
slackSend (color: '#FFFF00', message: "${currentBuild.result}: Job '${env.JOB_NAME} #${env.BUILD_NUMBER}' (<${env.BUILD_URL}|Open>)", channel: '#biopet-bot', teamDomain: 'lumc', tokenCredentialId: 'lumc')
}
} catch (e) {
// Any stage failure: record FAILED, notify Slack in red, and rethrow so
// Jenkins marks the build as failed.
if(currentBuild.result == null || "FAILED".equals(currentBuild.result)) {
currentBuild.result = "FAILED"
}
slackSend (color: '#FF0000', message: "${currentBuild.result}: Job '${env.JOB_NAME} #${env.BUILD_NUMBER}' (<${env.BUILD_URL}|Open>)", channel: '#biopet-bot', teamDomain: 'lumc', tokenCredentialId: 'lumc')
throw e
}
}
......@@ -136,7 +136,7 @@ class BamMetrics(val root: Configurable) extends QScript
ampBedToInterval.isIntermediate = true
add(ampBedToInterval)
val chsMetrics = CalculateHsMetrics(this, inputBam,
val chsMetrics = CollectHsMetrics(this, inputBam,
List(ampIntervals), ampIntervals :: roiIntervals.map(_.intervals), outputDir)
add(chsMetrics)
addSummarizable(chsMetrics, "hs_metrics")
......@@ -150,6 +150,10 @@ class BamMetrics(val root: Configurable) extends QScript
}
// Create stats and coverage plot for each bed/interval file
val allIntervalNames = (roiIntervals ++ ampIntervals).map(_.bed.getName)
if (allIntervalNames.size != allIntervalNames.toSet.size) {
logger.warn("There are multiple region files with the same name. Metric values might get overwritten")
}
for (intervals <- roiIntervals ++ ampIntervals) {
val targetName = intervals.bed.getName.stripSuffix(".bed")
val targetDir = new File(outputDir, targetName)
......@@ -158,14 +162,14 @@ class BamMetrics(val root: Configurable) extends QScript
output = new File(targetDir, inputBam.getName.stripSuffix(".bam") + ".overlap.strict.sam"),
minOverlap = config("strict_intersect_overlap", default = 1.0))
val biopetFlagstatStrict = BiopetFlagstat(this, biStrict.output, targetDir)
addSummarizable(biopetFlagstatStrict, targetName + "_biopet_flagstat_strict")
addSummarizable(biopetFlagstatStrict, targetName + "_flagstats_strict")
add(new BiopetFifoPipe(this, List(biStrict, biopetFlagstatStrict)))
val biLoose = BedtoolsIntersect(this, inputBam, intervals.bed,
output = new File(targetDir, inputBam.getName.stripSuffix(".bam") + ".overlap.loose.sam"),
minOverlap = config("loose_intersect_overlap", default = 0.01))
val biopetFlagstatLoose = BiopetFlagstat(this, biLoose.output, targetDir)
addSummarizable(biopetFlagstatLoose, targetName + "_biopet_flagstat_loose")
addSummarizable(biopetFlagstatLoose, targetName + "_flagstats_loose")
add(new BiopetFifoPipe(this, List(biLoose, biopetFlagstatLoose)))
val sortedBed = BamMetrics.sortedbedCache.getOrElse(intervals.bed, {
......
......@@ -345,8 +345,7 @@ object BammetricsReport extends ReportBuilder {
val pathValues: Map[String, Array[Any]] = paths.map {
case (key, path) =>
val value = summary.getValueAsArray(sampleId, libId, path: _*)
require(value.isDefined, s"Sample: $sampleId, library: $libId on path: '${path.mkString(",")}' does not exist in summary")
key -> value.get
key -> value.getOrElse(Array())
}
require(pathValues.map(_._2.size).toList.distinct.size == 1, s"Arrays in summary does not have the same number of values, $paths")
pathValues
......
......@@ -75,7 +75,7 @@ class BamMetricsTest extends TestNGSuite with Matchers {
bammetrics.functions.count(_.isInstanceOf[CollectRnaSeqMetrics]) shouldBe (if (rna) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[CollectWgsMetrics]) shouldBe (if (wgs) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[CollectMultipleMetrics]) shouldBe 1
bammetrics.functions.count(_.isInstanceOf[CalculateHsMetrics]) shouldBe (if (amplicon) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[CollectHsMetrics]) shouldBe (if (amplicon) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[CollectTargetedPcrMetrics]) shouldBe (if (amplicon) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[BamStats]) shouldBe 1
}
......
......@@ -177,4 +177,52 @@ object BiopetQScript {
require(outputDir.getAbsoluteFile.canWrite, s"No premision to write outputdir: $outputDir")
}
/** The function's input files, or None when reading them throws a NullPointerException. */
def safeInputs(function: QFunction): Option[Seq[File]] =
  scala.util.control.Exception.catching(classOf[NullPointerException]).opt(function.inputs)
/** The function's output files, or None when reading them throws a NullPointerException. */
def safeOutputs(function: QFunction): Option[Seq[File]] =
  scala.util.control.Exception.catching(classOf[NullPointerException]).opt(function.outputs)
/** The function's doneOutputs, or None when reading them throws a NullPointerException. */
def safeDoneFiles(function: QFunction): Option[Seq[File]] =
  scala.util.control.Exception.catching(classOf[NullPointerException]).opt(function.doneOutputs)
/** The function's failOutputs, or None when reading them throws a NullPointerException. */
def safeFailFiles(function: QFunction): Option[Seq[File]] =
  scala.util.control.Exception.catching(classOf[NullPointerException]).opt(function.failOutputs)
/** The function's isDone flag, or None when evaluating it throws a NullPointerException. */
def safeIsDone(function: QFunction): Option[Boolean] =
  scala.util.control.Exception.catching(classOf[NullPointerException]).opt(function.isDone)
/** The function's isFail flag, or None when evaluating it throws a NullPointerException. */
def safeIsFail(function: QFunction): Option[Boolean] =
  scala.util.control.Exception.catching(classOf[NullPointerException]).opt(function.isFail)
}
......@@ -141,11 +141,17 @@ trait MultiSampleQScript extends SummaryQScript { qscript: QScript =>
/** returns a set with library names or throws error when not found */
protected def libIds: Set[String] = {
val ids = ConfigUtils.getMapFromPath(globalConfig.map, List("samples", sampleId, "libraries")).getOrElse(Map()).keySet
if (ids.isEmpty) {
throw new IllegalStateException(s"No libraries found in config for sample $sampleId")
val ids: Set[String] = try {
ConfigUtils.getMapFromPath(globalConfig.map, List("samples", sampleId, "libraries")).getOrElse(Map()).keySet
} catch {
case e: IllegalStateException if e.getMessage == "Value is not a map: library" =>
Logging.addError("libraries for samples are not formatted correctly")
Set()
}
ids
if (ids.isEmpty) {
Logging.addError(s"No libraries found in config for sample $sampleId")
Set("placeholder")
} else ids
}
/** Name overules the one from qscript */
......
......@@ -97,13 +97,12 @@ object PipelineStatus extends ToolCommand {
val jobsDeps = deps.jobs.map(x => x._1 -> (x._2.dependsOnJobs match {
case l: List[_] => l.map(_.toString)
case _ => throw new IllegalStateException("Value 'depends_on_jobs' is not a list")
}))
val jobsWriter = new PrintWriter(new File(outputDir, s"jobs.json"))
jobsWriter.println(ConfigUtils.mapToJson(jobsDeps).spaces2)
jobsWriter.close()
futures :+= writeGraphvizFile(jobsDeps, new File(outputDir, s"jobs.gv"), jobDone, jobFailed, jobsStart, deps, plots, plots)
futures :+= writeGraphvizFile(compressOnType(jobsDeps), new File(outputDir, s"compress.jobs.gv"), jobDone, jobFailed, jobsStart, deps, compressPlots, compressPlots)
futures :+= writeGraphvizFile(jobsDeps, new File(outputDir, s"compress.jobs.gv"), jobDone, jobFailed, jobsStart, deps, compressPlots, compressPlots, compress = true)
val mainJobs = deps.jobs.filter(_._2.mainJob == true).map {
case (name, job) =>
......@@ -114,7 +113,7 @@ object PipelineStatus extends ToolCommand {
mainJobsWriter.println(ConfigUtils.mapToJson(mainJobs).spaces2)
mainJobsWriter.close()
futures :+= writeGraphvizFile(mainJobs, new File(outputDir, s"main_jobs.gv"), jobDone, jobFailed, jobsStart, deps, plots, plots)
futures :+= writeGraphvizFile(compressOnType(mainJobs), new File(outputDir, s"compress.main_jobs.gv"), jobDone, jobFailed, jobsStart, deps, compressPlots, compressPlots)
futures :+= writeGraphvizFile(mainJobs, new File(outputDir, s"compress.main_jobs.gv"), jobDone, jobFailed, jobsStart, deps, compressPlots, compressPlots, compress = true)
val totalJobs = deps.jobs.size
val totalStart = jobsStart.size
......@@ -148,14 +147,14 @@ object PipelineStatus extends ToolCommand {
val numberRegex = """(.*)_(\d*)$""".r
def compressOnType(jobs: Map[String, List[String]]): Map[String, List[String]] = {
val set = for ((job, deps) <- jobs.toSet; dep <- deps) yield {
job match {
case numberRegex(name, number) => (name, dep match {
case numberRegex(name, number) => name
})
}
(compressedName(job)._1, compressedName(dep)._1)
}
// This will collapse a Set[(String, String)] to a Map[String, List[String]]
set.groupBy(_._1).map(x => x._1 -> x._2.map(_._2).toList)
set.groupBy(_._1).map(x => x._1 -> x._2.map(_._2).toList) ++ jobs.filter(_._2.isEmpty).map(job => compressedName(job._1)._1 -> Nil)
}
/**
 * Splits a numbered job name such as "bwa_3" into its base name and numeric suffix.
 * Names without a "_<digits>" suffix raise a MatchError (numberRegex does not match).
 * NOTE(review): the regex allows an empty digit run ("job_"), which would make
 * toInt throw NumberFormatException — confirm such names cannot occur.
 */
def compressedName(jobName: String) = jobName match {
  case numberRegex(baseName, suffix) => (baseName, suffix.toInt)
}
def writeGraphvizFile(jobsDeps: Map[String, List[String]],
......@@ -164,24 +163,54 @@ object PipelineStatus extends ToolCommand {
jobFailed: Set[String],
jobsStart: Set[String],
deps: Deps,
png: Boolean = true, svg: Boolean = true): Future[Unit] = Future {
png: Boolean = true, svg: Boolean = true, compress: Boolean = false): Future[Unit] = Future {
val graph = if (compress) compressOnType(jobsDeps) else jobsDeps
val writer = new PrintWriter(outputFile)
writer.println("digraph graphname {")
jobDone
.filter(x => jobsDeps.contains(x))
.foreach(x => writer.println(s" $x [color = green]"))
jobFailed
.filter(x => jobsDeps.contains(x))
.foreach(x => writer.println(s" $x [color = red]"))
jobsStart
.filter(x => jobsDeps.contains(x))
.diff(jobDone)
.foreach(x => writer.println(s" $x [color = orange]"))
deps.jobs
.filter(x => jobsDeps.contains(x._1))
.filter(_._2.intermediate)
.foreach(x => writer.println(s" ${x._1} [style = dashed]"))
jobsDeps.foreach { case (a, b) => b.foreach(c => writer.println(s" $c -> $a;")) }
graph.foreach {
case (job, jobDeps) =>
// Writing color of node
val compressTotal = if (compress) Some(deps.jobs.keys.filter(compressedName(_)._1 == job)) else None
val compressDone = if (compress) Some(jobDone.filter(compressedName(_)._1 == job)) else None
val compressFailed = if (compress) Some(jobFailed.filter(compressedName(_)._1 == job)) else None
val compressStart = if (compress) Some(jobsStart.filter(compressedName(_)._1 == job)) else None
val compressIntermediate = if (compress) Some(deps.jobs.filter(x => compressedName(x._1)._1 == job).forall(_._2.intermediate)) else None
if (compress) {
val pend = compressTotal.get.size - compressFailed.get.filterNot(compressStart.get.contains(_)).size - compressStart.get.size - compressDone.get.size
writer.println(s""" $job [label = "$job
|Total: ${compressTotal.get.size}
|Fail: ${compressFailed.get.size}
|Pend:${pend}
|Run: ${compressStart.get.filterNot(compressFailed.get.contains(_)).size}
|Done: ${compressDone.get.size}"]""".stripMargin)
}
if (jobDone.contains(job) || compress && compressTotal == compressDone) writer.println(s" $job [color = green]")
else if (jobFailed.contains(job) || compress && compressFailed.get.nonEmpty) writer.println(s" $job [color = red]")
else if (jobsStart.contains(job) || compress && compressTotal == compressStart) writer.println(s" $job [color = orange]")
// Dashed lined for intermediate jobs
if ((deps.jobs.contains(job) && deps.jobs(job).intermediate) || (compressIntermediate == Some(true)))
writer.println(s" $job [style = dashed]")
// Writing Node deps
jobDeps.foreach { dep =>
if (compress) {
val depsNames = deps.jobs.filter(x => compressedName(x._1)._1 == dep)
.filter(_._2.outputUsedByJobs.exists(x => compressedName(x)._1 == job))
.map(x => x._1 -> x._2.outputUsedByJobs.filter(x => compressedName(x)._1 == job))
val total = depsNames.size
val done = depsNames.map(x => x._2.exists(y => jobDone.contains(x._1))).count(_ == true).toFloat / total
val fail = depsNames.map(x => x._2.exists(y => jobFailed.contains(x._1))).count(_ == true).toFloat / total
val start = (depsNames.map(x => x._2.exists(y => jobsStart.contains(x._1))).count(_ == true).toFloat / total) - fail
if (total > 0) writer.println(s""" $dep -> $job [color="red;%f:orange;%f:green;%f:black;%f"];"""
.format(fail, start, done, 1.0f - done - fail - start))
else writer.println(s" $dep -> $job;")
} else writer.println(s" $dep -> $job;")
}
}
writer.println("}")
writer.close()
......
......@@ -18,9 +18,8 @@ import java.io.File
import htsjdk.samtools.reference.IndexedFastaSequenceFile
import nl.lumc.sasc.biopet.core.summary.{ Summarizable, SummaryQScript }
import nl.lumc.sasc.biopet.utils.{ LazyCheck, BamUtils, ConfigUtils, FastaUtils, Logging }
import nl.lumc.sasc.biopet.utils._
import nl.lumc.sasc.biopet.utils.config.{ Config, Configurable }
import nl.lumc.sasc.biopet.utils.{ ConfigUtils, FastaUtils, Logging }
import scala.collection.JavaConversions._
......@@ -177,4 +176,33 @@ object Reference {
if (!dict.exists()) Logging.addError("Reference is missing a dict file")
}
}
/**
 * Interactively asks the user for a species and reference name, offering the
 * entries found under "references" in the global config defaults as suggestions,
 * and returns the answers as a config map. When the selected reference does not
 * already define "reference_fasta", a fasta path is asked for as well (validated
 * to be an existing absolute path).
 */
def askReference: Map[String, Any] = {
val warn = "If you use a non-standard reference, please make sure that you have generated all required indexes for this reference"
// Species known to the global config; empty map when no "references" section exists.
val globalSpecies = Config.global.defaults.getOrElse("references", Map()).asInstanceOf[Map[String, Any]]
val species = Question.string("species",
description = Some(if (globalSpecies.nonEmpty)
s"""Species found in general config:
|- ${globalSpecies.keys.toList.sorted.mkString("\n- ")}
|$warn
|""".stripMargin
else s"No references found in global config. $warn"))
// References configured for the chosen species; empty when the species is not in the config.
val globalReferences = globalSpecies.getOrElse(species, Map()).asInstanceOf[Map[String, Any]]
val referenceName = Question.string("reference_name",
description = Some(if (globalReferences.nonEmpty)
s"""Reference for $species found in general config:
|- ${globalReferences.keys.toList.sorted.mkString("\n- ")}
|$warn
|""".stripMargin
else s"No references found in global config. $warn"))
val reference = globalReferences.getOrElse(referenceName, Map()).asInstanceOf[Map[String, Any]]
// Only ask for a fasta file when the config does not already provide one.
val referenceFasta: Option[String] = if (reference.contains("reference_fasta")) None else {
Some(Question.string("Reference Fasta", validation = List(TemplateTool.isAbsolutePath, TemplateTool.mustExist),
description = Some(s"No fasta file found for $species -> $referenceName")))
}
Map("species" -> species, "reference_name" -> referenceName) ++ referenceFasta.map("reference_fasta" -> _)
}
}
package nl.lumc.sasc.biopet.core
import java.io.{ File, PrintWriter }
import nl.lumc.sasc.biopet.utils.summary.Summary
import nl.lumc.sasc.biopet.utils.{ ConfigUtils, Question, ToolCommand }
import scala.io.Source
/**
* Created by pjvanthof on 17/12/2016.
*/
/**
 * Base trait for interactive tools that generate a pipeline config file (YAML)
 * and, optionally, an executable run script from a template.
 */
trait TemplateTool extends ToolCommand {
import TemplateTool._
// Parsed command-line options. outputConfig is required (enforced by the parser),
// hence the null placeholder default.
case class Args(outputConfig: File = null,
runScript: Option[File] = None,
expert: Boolean = false,
template: Option[File] = None) extends AbstractArgs
class OptParser extends AbstractOptParser {
opt[File]('o', "outputConfig") required () valueName "<file>" action { (x, c) =>
c.copy(outputConfig = x)
} text "Path to output config"
opt[File]('s', "outputScript") valueName "<file>" action { (x, c) =>
c.copy(runScript = Some(x))
} text "Path to output script"
opt[File]('t', "template") valueName "<file>" action { (x, c) =>
c.copy(template = Some(x))
} text "Path to template. By default it will try to fetch this from the ENV value 'BIOPET_SCRIPT_TEMPLATE'"
opt[Unit]("expert") action { (x, c) =>
c.copy(expert = true)
} text "This enables expert options / questions"
}
/**
 * Entry point: parses arguments, writes the run script when requested, then
 * collects answers interactively and writes the resulting config as YAML.
 *
 * @param args the command line arguments
 */
def main(args: Array[String]): Unit = {
val argsParser = new OptParser
val cmdArgs: Args = argsParser.parse(args, Args()) getOrElse (throw new IllegalArgumentException)
// The run script (if any) is written before the interactive questions start.
cmdArgs.runScript.foreach(writeScript(_, cmdArgs.outputConfig, sampleConfigs, cmdArgs.template))
// Question shared by every pipeline; pipelineMap extends this with pipeline-specific answers.
val standard: Map[String, Any] = Map("output_dir" -> Question.string("Output directory",
validation = List(isAbsolutePath, parentIsWritable)))
val config = pipelineMap(standard, cmdArgs.expert)
val configWriter = new PrintWriter(cmdArgs.outputConfig)
configWriter.println(ConfigUtils.mapToYaml(config))
configWriter.close()
}
/**
 * Writes an executable run script based on a template file, substituting the
 * pipeline name and the generated "-config ..." arguments via String.format.
 * The template comes from `t` or, failing that, the BIOPET_SCRIPT_TEMPLATE
 * environment variable; neither being set is an error.
 */
def writeScript(outputFile: File, config: File, samples: List[File], t: Option[File]): Unit = {
val template = t match {
case Some(f) => f
case _ => sys.env.get("BIOPET_SCRIPT_TEMPLATE") match {
case Some(file) => new File(file)
case _ => throw new IllegalArgumentException("No template found on argument or 'BIOPET_SCRIPT_TEMPLATE'")
}
}
val templateReader = Source.fromFile(template)
val scriptWriter = new PrintWriter(outputFile)
// One "-config <path>" entry for the main config and each sample config.
val biopetArgs: String = (config :: samples).map(_.getAbsolutePath).mkString("-config ", " \\\n-config ", "")
templateReader.getLines().mkString("\n").format(pipelineName, biopetArgs).foreach(scriptWriter.print)
templateReader.close()
scriptWriter.close()
outputFile.setExecutable(true, false)
}
// Name of the concrete pipeline; substituted into the script template.
def pipelineName: String
// Returns the full config map; receives the standard answers collected so far.
def pipelineMap(map: Map[String, Any], expert: Boolean): Map[String, Any]
// Sample config files passed to the run script; Nil unless the tool collects them.
def sampleConfigs: List[File] = Nil
}
object TemplateTool {
  /** Validation function: true only for absolute paths; prints the reason when rejecting. */
  def isAbsolutePath(value: String): Boolean = {
    if (new File(value).isAbsolute) true
    else {
      // Fixed typo in the user-facing message ("a absulute" -> "an absolute").
      println(s"'$value' must be an absolute path")
      false
    }
  }

  /** Validation function: true only when the path exists on disk. */
  def mustExist(value: String): Boolean = {
    if (new File(value).exists()) true
    else {
      println(s"'$value' does not exist")
      false
    }
  }

  /** Validation function: true when the parent directory exists and is readable and writable. */
  def parentIsWritable(value: String): Boolean = {
    val parent = new File(value).getParentFile
    if (parent == null) {
      // getParentFile returns null for paths without a parent (e.g. a bare file name);
      // the original code would throw a NullPointerException here.
      println(s"'$value' does not have a parent directory")
      false
    } else if (!parent.exists()) {
      println(s"$parent does not exist")
      false
    } else if (!parent.canRead) {
      println(s"No permission to read $parent")
      false
    } else if (!parent.canWrite) {
      println(s"No permission to write $parent")
      false
    } else true
  }

  /**
   * Interactively collects sample config files. Recurses to add more files or to
   * start over when the user rejects the summary. When more than one file is
   * collected, the merged config is checked for overlapping samples/libraries
   * (a merged count lower than the summed raw count means duplicate names) and
   * any overlaps are reported before the final confirmation.
   */
  def askSampleConfigs(currentList: List[File] = Nil): List[File] = {
    val configFile = new File(Question.string("Sample config file", validation = List(mustExist, isAbsolutePath)))
    val configMap = new Summary(configFile)
    println(s"${configMap.samples.size} samples found in config " +
      s"with in total ${configMap.libraries.map(_._2.size).sum} libraries for '$configFile'")
    if (Question.boolean("Is this correct?")) {
      if (Question.boolean("Add more sample configs?")) askSampleConfigs(configFile :: currentList)
      else {
        val files = configFile :: currentList
        if (files.size > 1) {
          val configs = files.map(f => new Summary(ConfigUtils.fileToConfigMap(f)))
          val samples = configs.flatMap(_.samples.toList)
          val libs = configs.flatMap(_.libraries.flatMap { case (s, libs) => libs.toList.map(l => (s, l)) })
          val mergedConfig = new Summary(configs.foldLeft(Map[String, Any]())((a, b) => ConfigUtils.mergeMaps(a, b.map)))
          val mergesSamples = mergedConfig.samples.size
          val mergesLibraries = mergedConfig.libraries.map(_._2.size).sum
          if (mergesSamples != samples.size) {
            val overlappingSamples = samples.groupBy(s1 => samples.count(s2 => s1 == s2)).filter(_._1 > 1).flatMap(_._2).toList.distinct
            println("WARNING: Overlapping samples detected:")
            overlappingSamples.foreach(s => println(s" - $s"))
          }
          if (mergesLibraries != libs.size) {
            val overlappingLibs = libs.groupBy(l1 => libs.count(l2 => l1 == l2)).filter(_._1 > 1).flatMap(_._2).toList.distinct
            println("WARNING: Overlapping libraries detected")
            overlappingLibs.foreach(l => println(s" - ${l._1} -> ${l._2}"))
          }
          println(s"$mergesSamples samples found in merged config with in total $mergesLibraries libraries")
          if (Question.boolean("Is this correct?")) files
          else {
            println("Resetting sample configs")
            askSampleConfigs()
          }
        } else files
      }
    } else askSampleConfigs(currentList)
  }
}
\ No newline at end of file
......@@ -90,18 +90,13 @@ object WriteDependencies extends Logging with Configurable {
val files: mutable.Map[File, QueueFile] = mutable.Map()
def outputFiles(function: QFunction) = {
if (function.jobErrorFile == null) function.outputs :+ function.jobOutputFile
else function.outputs :+ function.jobOutputFile :+ function.jobErrorFile
}
for (function <- functions) {
for (input <- function.inputs) {
for (input <- BiopetQScript.safeInputs(function).getOrElse(Seq())) {
val file = files.getOrElse(input, QueueFile(input))
file.addInputJob(function)
files += input -> file
}
for (output <- outputFiles(function)) {
for (output <- BiopetQScript.safeOutputs(function).getOrElse(Seq())) {
val file = files.getOrElse(output, QueueFile(output))
file.addOutputJob(function)
files += output -> file
......@@ -118,16 +113,16 @@ object WriteDependencies extends Logging with Configurable {
case s: WriteSummary if s.qscript.root == null => true
case _ => false
}), "intermediate" -> f.isIntermediate,
"depends_on_intermediate" -> f.inputs.exists(files(_).isIntermediate),
"depends_on_jobs" -> f.inputs.toList.flatMap(files(_).outputJobNames).distinct,
"output_used_by_jobs" -> outputFiles(f).toList.flatMap(files(_).inputJobNames).distinct,
"outputs" -> outputFiles(f).toList,
"inputs" -> f.inputs.toList,
"done_files" -> f.doneOutputs.toList,
"fail_files" -> f.failOutputs.toList,
"depends_on_intermediate" -> BiopetQScript.safeOutputs(f).getOrElse(Seq()).exists(files(_).isIntermediate),
"depends_on_jobs" -> BiopetQScript.safeOutputs(f).getOrElse(Seq()).toList.flatMap(files(_).outputJobNames).distinct,
"output_used_by_jobs" -> BiopetQScript.safeOutputs(f).getOrElse(Seq()).toList.flatMap(files(_).inputJobNames).distinct,
"outputs" -> BiopetQScript.safeOutputs(f).getOrElse(Seq()).toList,
"inputs" -> BiopetQScript.safeOutputs(f).getOrElse(Seq()).toList,
"done_files" -> BiopetQScript.safeDoneFiles(f).getOrElse(Seq()).toList,
"fail_files" -> BiopetQScript.safeFailFiles(f).getOrElse(Seq()).toList,
"stdout_file" -> f.jobOutputFile,
"done_at_start" -> f.isDone,
"fail_at_start" -> f.isFail)
"done_at_start" -> BiopetQScript.safeIsDone(f),
"fail_at_start" -> BiopetQScript.safeIsFail(f))
}.toIterator.toMap
val outputFile = new File(outputDir, s"deps.json")
......
......@@ -15,11 +15,12 @@
package nl.lumc.sasc.biopet.core
import java.io.File
import nl.lumc.sasc.biopet.core.MultiSampleQScript.Gender
import nl.lumc.sasc.biopet.core.extensions.Md5sum
import nl.lumc.sasc.biopet.utils.{ ConfigUtils, Logging }
import nl.lumc.sasc.biopet.utils.config.Config
import org.broadinstitute.gatk.queue.QScript
import org.broadinstitute.gatk.queue.{ QScript, QSettings }
import org.scalatest.Matchers
import org.scalatest.testng.TestNGSuite
import org.testng.annotations.Test
......@@ -121,7 +122,7 @@ class MultiSampleQScriptTest extends TestNGSuite with Matchers {