Commit 6dd35b5a authored by Sander Bollen

Merge branch 'fix-BIOPET-475' into 'develop'

Fix BIOPET-475



See merge request !492
parents 03852d0d 42b34355
@@ -19,6 +19,7 @@ import java.io.File
import nl.lumc.sasc.biopet.core.summary.{ SummaryQScript, WriteSummary }
import nl.lumc.sasc.biopet.utils.config.Configurable
import nl.lumc.sasc.biopet.core.report.ReportBuilderExtension
import nl.lumc.sasc.biopet.core.workaround.BiopetQCommandLine
import nl.lumc.sasc.biopet.utils.Logging
import org.broadinstitute.gatk.queue.{ QScript, QSettings }
import org.broadinstitute.gatk.queue.function.QFunction
@@ -103,8 +104,10 @@ trait BiopetQScript extends Configurable with GatkLogging { qscript: QScript =>
case _ =>
}
val logDir = new File(outputDir, ".log" + File.separator + qSettings.runName.toLowerCase)
if (outputDir.getParentFile.canWrite || (outputDir.exists && outputDir.canWrite))
globalConfig.writeReport(qSettings.runName, new File(outputDir, ".log/" + qSettings.runName))
globalConfig.writeReport(new File(logDir, "config"))
else Logging.addError("Parent of output dir: '" + outputDir.getParent + "' is not writeable, output directory cannot be created")
logger.info("Checking input files")
@@ -123,7 +126,9 @@ trait BiopetQScript extends Configurable with GatkLogging { qscript: QScript =>
}
})
if (!skipWriteDependencies) WriteDependencies.writeDependencies(functions, new File(outputDir, ".log"), qSettings.runName)
if (!skipWriteDependencies) WriteDependencies.writeDependencies(
functions,
new File(logDir, "graph"))
Logging.checkErrors()
logger.info("Script complete without errors")
@@ -171,4 +176,5 @@ object BiopetQScript {
require(outputDir.getAbsoluteFile.canRead, s"No permission to read output dir: $outputDir")
require(outputDir.getAbsoluteFile.canWrite, s"No permission to write to output dir: $outputDir")
}
}
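For orientation, a minimal sketch (not part of this commit) of how the per-run log directory from the hunks above resolves on disk; the output directory and run name below are made-up examples, while ".log", "config" and "graph" are the literals used above.

import java.io.File

val outputDir = new File("/data/project")    // hypothetical output_dir
val runName = "myrun"                        // stands in for qSettings.runName
val logDir = new File(outputDir, ".log" + File.separator + runName.toLowerCase)
val configDir = new File(logDir, "config")   // config report written by globalConfig.writeReport
val graphDir = new File(logDir, "graph")     // dependency files written by WriteDependencies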
@@ -73,15 +73,14 @@ trait PipelineCommand extends MainCommand with GatkLogging with ImplicitConversi
}
}
val logFile = {
val pipelineName = this.getClass.getSimpleName.toLowerCase.split("""\$""").head
val pipelineConfig = globalConfig.map.getOrElse(pipelineName, Map()).asInstanceOf[Map[String, Any]]
val pipelineOutputDir = new File(globalConfig.map.getOrElse("output_dir", pipelineConfig.getOrElse("output_dir", "./")).toString)
BiopetQScript.checkOutputDir(pipelineOutputDir)
val logDir: File = new File(pipelineOutputDir, ".log")
logDir.mkdirs()
new File(logDir, "biopet." + BiopetQCommandLine.timestamp + ".log")
}
val pipelineName = this.getClass.getSimpleName.toLowerCase.split("""\$""").head
val pipelineConfig = globalConfig.map.getOrElse(pipelineName, Map()).asInstanceOf[Map[String, Any]]
val pipelineOutputDir = new File(globalConfig.map.getOrElse("output_dir", pipelineConfig.getOrElse("output_dir", "./")).toString)
BiopetQScript.checkOutputDir(pipelineOutputDir)
val logDir: File = new File(pipelineOutputDir, ".log" + File.separator + pipelineName + "." + BiopetQCommandLine.timestamp)
logDir.mkdirs()
val logFile = new File(logDir, "biopet.log")
val a = new WriterAppender(new PatternLayout("%-5p [%d] [%C{1}] - %m%n"), new PrintWriter(logFile))
Logging.logger.addAppender(a)
@@ -90,7 +89,7 @@
argv ++= Array("-S", pipeline)
argv ++= args
if (!args.contains("--log_to_file") && !args.contains("-log")) {
argv ++= List("--log_to_file", new File(logFile.getParentFile, "queue." + BiopetQCommandLine.timestamp + ".log").getAbsolutePath)
argv ++= List("--log_to_file", new File(logDir, "queue.log").getAbsolutePath)
}
if (!args.contains("-retry") && !args.contains("--retry_failed")) {
val retry: Int = globalConfig(pipelineName, Nil, "retry", default = 5)
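Likewise, a hedged sketch of the log file locations implied by the PipelineCommand hunk above; the pipeline name and timestamp value are invented for illustration, while ".log", "biopet.log" and "queue.log" come from the lines above.

import java.io.File

val pipelineOutputDir = new File("/data/project")   // hypothetical output_dir
val pipelineName = "shiva"                          // hypothetical pipeline name
val timestamp = "20170101-120000"                   // stands in for BiopetQCommandLine.timestamp
val logDir = new File(pipelineOutputDir, ".log" + File.separator + pipelineName + "." + timestamp)
val biopetLog = new File(logDir, "biopet.log")      // attached to the framework logger via the WriterAppender above
val queueLog = new File(logDir, "queue.log")        // passed to Queue as --log_to_file unless -log/--log_to_file is given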
@@ -48,9 +48,9 @@ object WriteDependencies extends Logging with Configurable {
*
* @param functions All functions that are passed to Queue's dependency graph
* @param outputDir Directory the dependency files are written to
* @param prefix prefix
*/
def writeDependencies(functions: Seq[QFunction], outputDir: File, prefix: String): Unit = {
def writeDependencies(functions: Seq[QFunction], outputDir: File): Unit = {
outputDir.mkdirs()
logger.info("Start calculating dependencies")
val errorOnMissingInput: Boolean = config("error_on_missing_input", false)
@@ -130,7 +130,7 @@
"fail_at_start" -> f.isFail)
}.toIterator.toMap
val outputFile = new File(outputDir, s"$prefix.deps.json")
val outputFile = new File(outputDir, s"deps.json")
logger.info(s"Writing dependencies to: $outputFile")
val writer = new PrintWriter(outputFile)
writer.println(ConfigUtils.mapToJson(Map(
@@ -143,22 +143,22 @@
case l: List[_] => l.map(_.toString)
case _ => throw new IllegalStateException("Value 'depends_on_jobs' is not a list")
}))
val jobsWriter = new PrintWriter(new File(outputDir, s"$prefix.jobs.json"))
val jobsWriter = new PrintWriter(new File(outputDir, s"jobs.json"))
jobsWriter.println(ConfigUtils.mapToJson(jobsDeps).spaces2)
jobsWriter.close()
writeGraphvizFile(jobsDeps, new File(outputDir, s"$prefix.jobs.gv"))
writeGraphvizFile(compressOnType(jobsDeps), new File(outputDir, s"$prefix.compress.jobs.gv"))
writeGraphvizFile(jobsDeps, new File(outputDir, s"jobs.gv"))
writeGraphvizFile(compressOnType(jobsDeps), new File(outputDir, s"compress.jobs.gv"))
val mainJobs = jobs.filter(_._2("main_job") == true).map {
case (name, job) =>
name -> getMainDependencies(name, jobs)
}
val mainJobsWriter = new PrintWriter(new File(outputDir, s"$prefix.main_jobs.json"))
val mainJobsWriter = new PrintWriter(new File(outputDir, s"main_jobs.json"))
mainJobsWriter.println(ConfigUtils.mapToJson(mainJobs).spaces2)
mainJobsWriter.close()
writeGraphvizFile(mainJobs, new File(outputDir, s"$prefix.main_jobs.gv"))
writeGraphvizFile(compressOnType(mainJobs), new File(outputDir, s"$prefix.compress.main_jobs.gv"))
writeGraphvizFile(mainJobs, new File(outputDir, s"main_jobs.gv"))
writeGraphvizFile(compressOnType(mainJobs), new File(outputDir, s"compress.main_jobs.gv"))
logger.info("done calculating dependencies")
}
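A rough usage sketch of the two-argument signature above, assuming WriteDependencies and a populated Seq[QFunction] are already in scope (the test below performs the same call against a temp directory).

import java.io.File
import org.broadinstitute.gatk.queue.function.QFunction

// Sketch only: dump the dependency graph files for a finished QScript.
def dumpDependencyGraph(functions: Seq[QFunction], logDir: File): Unit =
  WriteDependencies.writeDependencies(functions, new File(logDir, "graph"))
// Writes deps.json, jobs.json, jobs.gv, compress.jobs.gv,
// main_jobs.json, main_jobs.gv and compress.main_jobs.gv into logDir/graph.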
@@ -39,15 +39,14 @@ class WriteDependenciesTest extends TestNGSuite with Matchers {
}
@Test
def testDeps: Unit = {
def testDeps(): Unit = {
val tempDir = Files.createTempDir()
tempDir.deleteOnExit()
val prefix = "test"
val outputFile = new File(tempDir, s"$prefix.deps.json")
val outputFile = new File(tempDir, s"deps.json")
outputFile.deleteOnExit()
val func1 = Qfunc(file1 :: Nil, file2 :: Nil)
val func2 = Qfunc(file2 :: Nil, file3 :: Nil)
WriteDependencies.writeDependencies(func1 :: func2 :: Nil, tempDir, prefix)
WriteDependencies.writeDependencies(func1 :: func2 :: Nil, tempDir)
val deps = ConfigUtils.fileToConfigMap(outputFile)
deps("jobs") shouldBe a[Map[_, _]]
val jobs = deps("jobs").asInstanceOf[Map[String, Map[String, Any]]]
@@ -55,7 +54,7 @@ class WriteDependenciesTest extends TestNGSuite with Matchers {
deps("files") shouldBe a[List[_]]
val files = deps("files").asInstanceOf[List[Map[String, Any]]]
val paths = files.map(x => x.get("path")).flatten
val paths = files.flatMap(x => x.get("path"))
assert(paths.contains(file1.toString))
assert(paths.contains(file2.toString))
assert(paths.contains(file3.toString))
@@ -189,7 +189,7 @@ class Config(protected var _map: Map[String, Any],
} else ConfigValue(requestedIndex, null, null, freeVar)
}
def writeReport(id: String, directory: File): Unit = {
def writeReport(directory: File): Unit = {
directory.mkdirs()
def convertIndexValuesToMap(input: List[(ConfigValueIndex, Any)], forceFreeVar: Option[Boolean] = None): Map[String, Any] = {
@@ -205,7 +205,7 @@
}
def writeMapToJsonFile(map: Map[String, Any], name: String): Unit = {
val file = new File(directory, id + "." + name + ".json")
val file = new File(directory, name + ".json")
val writer = new PrintWriter(file)
writer.write(ConfigUtils.mapToJson(map).spaces2)
writer.close()