Commit 50e5cc55 authored by Sander Bollen

Merge branch 'fix-BIOPET-497' into 'develop'

Fix biopet 497



See merge request !510
parents 01fbae99 e01e6050
@@ -103,7 +103,7 @@ object PipelineStatus extends ToolCommand {
jobsWriter.println(ConfigUtils.mapToJson(jobsDeps).spaces2)
jobsWriter.close()
futures :+= writeGraphvizFile(jobsDeps, new File(outputDir, s"jobs.gv"), jobDone, jobFailed, jobsStart, deps, plots, plots)
- futures :+= writeGraphvizFile(compressOnType(jobsDeps), new File(outputDir, s"compress.jobs.gv"), jobDone, jobFailed, jobsStart, deps, compressPlots, compressPlots)
+ futures :+= writeGraphvizFile(jobsDeps, new File(outputDir, s"compress.jobs.gv"), jobDone, jobFailed, jobsStart, deps, compressPlots, compressPlots, compress = true)
val mainJobs = deps.jobs.filter(_._2.mainJob == true).map {
case (name, job) =>
@@ -114,7 +114,7 @@ object PipelineStatus extends ToolCommand {
mainJobsWriter.println(ConfigUtils.mapToJson(mainJobs).spaces2)
mainJobsWriter.close()
futures :+= writeGraphvizFile(mainJobs, new File(outputDir, s"main_jobs.gv"), jobDone, jobFailed, jobsStart, deps, plots, plots)
- futures :+= writeGraphvizFile(compressOnType(mainJobs), new File(outputDir, s"compress.main_jobs.gv"), jobDone, jobFailed, jobsStart, deps, compressPlots, compressPlots)
+ futures :+= writeGraphvizFile(mainJobs, new File(outputDir, s"compress.main_jobs.gv"), jobDone, jobFailed, jobsStart, deps, compressPlots, compressPlots, compress = true)
val totalJobs = deps.jobs.size
val totalStart = jobsStart.size
@@ -148,40 +148,70 @@ object PipelineStatus extends ToolCommand {
val numberRegex = """(.*)_(\d*)$""".r
def compressOnType(jobs: Map[String, List[String]]): Map[String, List[String]] = {
val set = for ((job, deps) <- jobs.toSet; dep <- deps) yield {
- job match {
-   case numberRegex(name, number) => (name, dep match {
-     case numberRegex(name, number) => name
-   })
- }
+ (compressedName(job)._1, compressedName(dep)._1)
}
// This will collapse a Set[(String, String)] to a Map[String, List[String]]
set.groupBy(_._1).map(x => x._1 -> x._2.map(_._2).toList)
}
+ def compressedName(jobName: String) = jobName match {
+   case numberRegex(name, number) => (name, number.toInt)
+ }
def writeGraphvizFile(jobsDeps: Map[String, List[String]],
outputFile: File,
jobDone: Set[String],
jobFailed: Set[String],
jobsStart: Set[String],
deps: Deps,
- png: Boolean = true, svg: Boolean = true): Future[Unit] = Future {
+ png: Boolean = true, svg: Boolean = true, compress: Boolean = false): Future[Unit] = Future {
+ val graph = if (compress) compressOnType(jobsDeps) else jobsDeps
val writer = new PrintWriter(outputFile)
writer.println("digraph graphname {")
- jobDone
-   .filter(x => jobsDeps.contains(x))
-   .foreach(x => writer.println(s" $x [color = green]"))
- jobFailed
-   .filter(x => jobsDeps.contains(x))
-   .foreach(x => writer.println(s" $x [color = red]"))
- jobsStart
-   .filter(x => jobsDeps.contains(x))
-   .diff(jobDone)
-   .foreach(x => writer.println(s" $x [color = orange]"))
- deps.jobs
-   .filter(x => jobsDeps.contains(x._1))
-   .filter(_._2.intermediate)
-   .foreach(x => writer.println(s" ${x._1} [style = dashed]"))
- jobsDeps.foreach { case (a, b) => b.foreach(c => writer.println(s" $c -> $a;")) }
+ graph.foreach {
+   case (job, jobDeps) =>
+     // Writing color of node
+     val compressTotal = if (compress) Some(deps.jobs.keys.filter(compressedName(_)._1 == job)) else None
+     val compressDone = if (compress) Some(jobDone.filter(compressedName(_)._1 == job)) else None
+     val compressFailed = if (compress) Some(jobFailed.filter(compressedName(_)._1 == job)) else None
+     val compressStart = if (compress) Some(jobsStart.filter(compressedName(_)._1 == job)) else None
+     val compressIntermediate = if (compress) Some(deps.jobs.filter(x => compressedName(x._1)._1 == job).forall(_._2.intermediate)) else None
+     if (compress) {
+       val pend = compressTotal.get.size - compressFailed.get.filterNot(compressStart.get.contains(_)).size - compressStart.get.size - compressDone.get.size
+       writer.println(s""" $job [label = "$job
+         |Total: ${compressTotal.get.size}
+         |Fail: ${compressFailed.get.size}
+         |Pend:${pend}
+         |Run: ${compressStart.get.filterNot(compressFailed.get.contains(_)).size}
+         |Done: ${compressDone.get.size}"]""".stripMargin)
+     }
+     if (jobDone.contains(job) || compress && compressTotal == compressDone) writer.println(s" $job [color = green]")
+     else if (jobFailed.contains(job) || compress && compressFailed.get.nonEmpty) writer.println(s" $job [color = red]")
+     else if (jobsStart.contains(job) || compress && compressTotal == compressStart) writer.println(s" $job [color = orange]")
+     // Dashed lined for intermediate jobs
+     if ((deps.jobs.contains(job) && deps.jobs(job).intermediate) || (compressIntermediate == Some(true)))
+       writer.println(s" $job [style = dashed]")
+     // Writing Node deps
+     jobDeps.foreach { dep =>
+       if (compress) {
+         val depsNames = deps.jobs.filter(x => compressedName(x._1)._1 == dep)
+           .filter(_._2.outputUsedByJobs.exists(x => compressedName(x)._1 == job))
+           .map(x => x._1 -> x._2.outputUsedByJobs.filter(x => compressedName(x)._1 == job))
+         val total = depsNames.size
+         val done = depsNames.map(x => x._2.exists(y => jobDone.contains(x._1))).count(_ == true).toFloat / total
+         val fail = depsNames.map(x => x._2.exists(y => jobFailed.contains(x._1))).count(_ == true).toFloat / total
+         val start = (depsNames.map(x => x._2.exists(y => jobsStart.contains(x._1))).count(_ == true).toFloat / total) - fail
+         if (total > 0) writer.println(s""" $dep -> $job [color="red;%f:orange;%f:green;%f:black;%f"];"""
+           .format(fail, start, done, 1.0f - done - fail - start))
+         else writer.println(s" $dep -> $job;")
+       } else writer.println(s" $dep -> $job;")
+     }
+ }
writer.println("}")
writer.close()
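The compression introduced above collapses numbered shard jobs (for example fastqc_1, fastqc_2) onto a single node per job type, so the compressed Graphviz files stay readable for large pipelines. A minimal, self-contained sketch of that behaviour, reusing the numberRegex, compressedName and compressOnType definitions from this change with made-up job names for illustration:

object CompressOnTypeExample extends App {
  // Same regex as in PipelineStatus: splits "name_<number>" into its parts.
  val numberRegex = """(.*)_(\d*)$""".r

  // Strip the trailing shard number from a job name.
  def compressedName(jobName: String): (String, Int) = jobName match {
    case numberRegex(name, number) => (name, number.toInt)
  }

  // Collapse a per-shard dependency graph onto one node per job type.
  def compressOnType(jobs: Map[String, List[String]]): Map[String, List[String]] = {
    val set = for ((job, deps) <- jobs.toSet; dep <- deps)
      yield (compressedName(job)._1, compressedName(dep)._1)
    set.groupBy(_._1).map(x => x._1 -> x._2.map(_._2).toList)
  }

  // Hypothetical shard jobs, not taken from a real pipeline run.
  val jobs = Map(
    "fastqc_1" -> List("gzip_1"),
    "fastqc_2" -> List("gzip_2"),
    "bwaMem_1" -> List("fastqc_1", "fastqc_2"))

  // Prints something like: Map(fastqc -> List(gzip), bwaMem -> List(fastqc))
  println(compressOnType(jobs))
}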
@@ -75,8 +75,8 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
/** Function that's need to be executed before the script is accessed */
def init() {
if (inputR1 == null) Logging.addError("Missing input R1 on flexiprep module")
- if (sampleId == null || sampleId == None) Logging.addError("Missing sample ID on flexiprep module")
- if (libId == null || libId == None) Logging.addError("Missing library ID on flexiprep module")
+ if (sampleId == null || sampleId.isEmpty) Logging.addError("Missing sample ID on flexiprep module")
+ if (libId == null || libId.isEmpty) Logging.addError("Missing library ID on flexiprep module")
if (inputR1 == null) Logging.addError("Missing input R1 on flexiprep module")
else {
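The sample and library ID checks now use isEmpty instead of comparing against None. A minimal sketch of that idiom, assuming sampleId and libId are Option[String] fields (an assumption; their declarations are not part of this diff) with the null guard only covering an uninitialized var:

object IdCheckExample extends App {
  // Hypothetical stand-ins for the flexiprep fields, assumed to be Option[String].
  var sampleId: Option[String] = None
  var libId: Option[String] = Some("lib1")

  // Mirrors the check in init(): null guards an uninitialized var,
  // isEmpty covers the "no value set" case for an Option.
  def missing(id: Option[String]): Boolean = id == null || id.isEmpty

  println(missing(sampleId)) // true: no sample ID was set
  println(missing(libId))    // false: a library ID is present
}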
@@ -137,13 +137,11 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
}
val seqstatR1 = SeqStat(this, inputR1, outputDir)
seqstatR1.isIntermediate = true
- add(seqstatR1)
addSummarizable(seqstatR1, "seqstat_R1")
if (paired) {
val seqstatR2 = SeqStat(this, inputR2.get, outputDir)
seqstatR2.isIntermediate = true
- add(seqstatR2)
addSummarizable(seqstatR2, "seqstat_R2")
}
@@ -19,11 +19,11 @@ import java.util.Date
import nl.lumc.sasc.biopet.core._
import nl.lumc.sasc.biopet.core.summary.SummaryQScript
- import nl.lumc.sasc.biopet.extensions.bowtie.{Bowtie, Bowtie2}
- import nl.lumc.sasc.biopet.extensions.bwa.{BwaAln, BwaMem, BwaSampe, BwaSamse}
+ import nl.lumc.sasc.biopet.extensions.bowtie.{ Bowtie, Bowtie2 }
+ import nl.lumc.sasc.biopet.extensions.bwa.{ BwaAln, BwaMem, BwaSampe, BwaSamse }
import nl.lumc.sasc.biopet.extensions.gmap.Gsnap
import nl.lumc.sasc.biopet.extensions.hisat.Hisat2
- import nl.lumc.sasc.biopet.extensions.picard.{AddOrReplaceReadGroups, MarkDuplicates, MergeSamFiles, ReorderSam, SortSam}
+ import nl.lumc.sasc.biopet.extensions.picard.{ AddOrReplaceReadGroups, MarkDuplicates, MergeSamFiles, ReorderSam, SortSam }
import nl.lumc.sasc.biopet.extensions.tools.FastqSplitter
import nl.lumc.sasc.biopet.extensions._
import nl.lumc.sasc.biopet.pipelines.bammetrics.BamMetrics
@@ -31,7 +31,7 @@ import nl.lumc.sasc.biopet.pipelines.bamtobigwig.Bam2Wig
import nl.lumc.sasc.biopet.pipelines.flexiprep.Flexiprep
import nl.lumc.sasc.biopet.pipelines.gears.GearsSingle
import nl.lumc.sasc.biopet.pipelines.mapping.scripts.TophatRecondition
- import nl.lumc.sasc.biopet.utils.{Logging, textToSize}
+ import nl.lumc.sasc.biopet.utils.{ Logging, textToSize }
import nl.lumc.sasc.biopet.utils.config.Configurable
import org.broadinstitute.gatk.queue.QScript