Commit c5d042c6 authored by pjvan_thof's avatar pjvan_thof
Browse files

Adding optional properties file for biopet

parent 4d76382d
...@@ -30,7 +30,6 @@ import scala.util.Random ...@@ -30,7 +30,6 @@ import scala.util.Random
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
...@@ -77,8 +76,6 @@ object VcfStats extends ToolCommand { ...@@ -77,8 +76,6 @@ object VcfStats extends ToolCommand {
      "Filtered",
      "Variant")
  //protected var cmdArgs: Args = _
  val defaultGenotypeFields =
    List("DP", "GQ", "AD", "AD-ref", "AD-alt", "AD-used", "AD-not_used", "general")
...@@ -108,7 +105,7 @@ object VcfStats extends ToolCommand { ...@@ -108,7 +105,7 @@ object VcfStats extends ToolCommand {
logger.info("Init spark context") logger.info("Init spark context")
val conf = new SparkConf() val conf = new SparkConf()
.setAppName(this.getClass.getSimpleName) .setAppName(commandName)
.setMaster(cmdArgs.sparkMaster.getOrElse(s"local[${cmdArgs.localThreads}]")) .setMaster(cmdArgs.sparkMaster.getOrElse(s"local[${cmdArgs.localThreads}]"))
val sparkContext = new SparkContext(conf) val sparkContext = new SparkContext(conf)
......
...@@ -61,7 +61,7 @@ object VcfStatsSpark extends ToolCommand { ...@@ -61,7 +61,7 @@ object VcfStatsSpark extends ToolCommand {
.getURLs .getURLs
.map(_.getFile) .map(_.getFile)
val conf = new SparkConf() val conf = new SparkConf()
.setAppName(this.getClass.getSimpleName) .setAppName(commandName)
.setMaster(cmdArgs.sparkMaster.getOrElse(s"local[${cmdArgs.localThreads}]")) .setMaster(cmdArgs.sparkMaster.getOrElse(s"local[${cmdArgs.localThreads}]"))
.setJars(jars) .setJars(jars)
val sc = new SparkContext(conf) val sc = new SparkContext(conf)
......
spark.driver.memory=500000000
spark.testing.memory=500000000
...@@ -21,6 +21,8 @@ import nl.lumc.sasc.biopet.FullVersion ...@@ -21,6 +21,8 @@ import nl.lumc.sasc.biopet.FullVersion
*/ */
trait ToolCommand extends MainCommand with Logging {
  loadBiopetProperties()

  protected type Args
  protected type OptParser <: AbstractOptParser[Args]
}
......
...@@ -114,4 +114,14 @@ package object utils { ...@@ -114,4 +114,14 @@ package object utils {
      case _ => a.toString < b.toString
    }
  }
/** Merges an optional `biopet.properties` resource from the classpath into the
  * JVM system properties. When the resource is absent this is a silent no-op,
  * keeping the properties file optional.
  *
  * Note: `System.getProperties` returns the live properties object, so loading
  * into it already takes effect; `System.setProperties` is kept for clarity.
  */
def loadBiopetProperties(): Unit = {
  val is = getClass.getClassLoader.getResourceAsStream("biopet.properties")
  if (is != null) {
    try {
      val prop = System.getProperties
      prop.load(is)
      System.setProperties(prop)
    } finally {
      // The stream was previously leaked; always release it, even if load() throws.
      is.close()
    }
  }
}
} }
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment