Commit e371afc7 authored by Peter van 't Hof's avatar Peter van 't Hof

Config report now happens in the background

parent 65f98276
@@ -24,6 +24,8 @@ import org.broadinstitute.gatk.queue.{ QScript, QSettings }
 import org.broadinstitute.gatk.queue.function.QFunction
 import org.broadinstitute.gatk.queue.util.{ Logging => GatkLogging }
+import scala.collection.mutable.ListBuffer
 /** Base for biopet pipeline */
 trait BiopetQScript extends Configurable with GatkLogging { qscript: QScript =>
@@ -95,6 +97,7 @@ trait BiopetQScript extends Configurable with GatkLogging { qscript: QScript =>
     logger.info("Running pre commands")
     var count = 0
+    val totalCount = functions.size
     for (function <- functions) {
       function match {
         case f: BiopetCommandLineFunction =>
@@ -106,15 +109,15 @@ trait BiopetQScript extends Configurable with GatkLogging { qscript: QScript =>
         case _ =>
       }
       count += 1
-      if (count % 500 == 0) logger.info(s"Preprocessing done for ${count} jobs out of ${functions.length} total")
+      if (count % 500 == 0) logger.info(s"Preprocessing done for $count jobs out of $totalCount total")
     }
-    logger.info(s"Preprocessing done for ${functions.length} functions")
+    logger.info(s"Preprocessing done for $totalCount functions")
     val logDir = new File(outputDir, ".log" + File.separator + qSettings.runName.toLowerCase)
     if (outputDir.getParentFile.canWrite || (outputDir.exists && outputDir.canWrite))
       globalConfig.writeReport(new File(logDir, "config"))
-    else Logging.addError("Parent of output dir: '" + outputDir.getParent + "' is not writeable, output directory cannot be created")
+    else Logging.addError("Parent of output dir: '" + outputDir.getParent + "' is not writable, output directory cannot be created")
     logger.info("Checking input files")
     inputFiles.par.foreach { i =>
...
@@ -14,11 +14,11 @@
  */
 package nl.lumc.sasc.biopet.core.summary
-import java.io.{File, PrintWriter}
+import java.io.{ File, PrintWriter }
 import java.sql.Date
 import nl.lumc.sasc.biopet.core._
-import nl.lumc.sasc.biopet.core.extensions.{CheckChecksum, Md5sum}
+import nl.lumc.sasc.biopet.core.extensions.{ CheckChecksum, Md5sum }
 import nl.lumc.sasc.biopet.utils.summary.db.SummaryDb
 import org.broadinstitute.gatk.queue.QScript
 import nl.lumc.sasc.biopet.LastCommitHash
...
@@ -15,9 +15,13 @@
 package nl.lumc.sasc.biopet.utils.config
 import java.io.{ File, PrintWriter }
-import nl.lumc.sasc.biopet.utils.{ Logging, ConfigUtils }
+import nl.lumc.sasc.biopet.utils.{ConfigUtils, Logging}
 import nl.lumc.sasc.biopet.utils.ConfigUtils._
+import scala.concurrent.Future
+import scala.concurrent.ExecutionContext.Implicits.global
 /**
  * This class can store nested config values
  * @param _map Map with value for new config
@@ -189,7 +193,7 @@ class Config(protected var _map: Map[String, Any],
     } else ConfigValue(requestedIndex, null, null, freeVar)
   }
-  def writeReport(directory: File): Unit = {
+  def writeReport(directory: File): Future[Unit] = Future {
     directory.mkdirs()
     def convertIndexValuesToMap(input: List[(ConfigValueIndex, Any)], forceFreeVar: Option[Boolean] = None): Map[String, Any] = {
...
@@ -6,8 +6,8 @@ import slick.driver.H2Driver.api._
 import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.duration.Duration
-import scala.concurrent.{Await, Future}
-import java.io.{Closeable, File}
+import scala.concurrent.{ Await, Future }
+import java.io.{ Closeable, File }
 import java.sql.Date
 /**
...
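
Note on the change: the core of this commit is the new signature of Config.writeReport, which now returns Future[Unit] and wraps its body in Future { ... }, so the config report is written on a background thread while BiopetQScript keeps preprocessing jobs. Below is a minimal, self-contained sketch of that pattern only; the object name ConfigReportSketch, the config.json file name, and the .log/example directory are illustrative assumptions, not part of the Biopet code.

import java.io.{ File, PrintWriter }
import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration

// Sketch of the "report in the background" pattern introduced by this commit.
object ConfigReportSketch {
  // Wrapping the body in Future { ... } moves the disk I/O off the caller's
  // thread; the real Config.writeReport serializes the merged config maps instead.
  def writeReport(directory: File): Future[Unit] = Future {
    directory.mkdirs()
    val writer = new PrintWriter(new File(directory, "config.json")) // hypothetical file name
    try writer.println("""{ "example": true }""")                    // placeholder content
    finally writer.close()
  }

  def main(args: Array[String]): Unit = {
    val report = writeReport(new File(".log/example"))
    // ... the pipeline would keep preprocessing jobs and checking inputs here ...
    Await.result(report, Duration.Inf) // block before exit so the report is not lost
  }
}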