Commit 2540e8fd authored by Peter van 't Hof's avatar Peter van 't Hof

Inspection round biopet utils

parent 2c840f96
...@@ -31,7 +31,7 @@ class Bam2Wig(val parent: Configurable) extends QScript with BiopetQScript { ...@@ -31,7 +31,7 @@ class Bam2Wig(val parent: Configurable) extends QScript with BiopetQScript {
def this() = this(null) def this() = this(null)
@Input(doc = "Input bam file", required = true) @Input(doc = "Input bam file", required = true)
var bamFile: File = null var bamFile: File = _
def init(): Unit = { def init(): Unit = {
inputFiles :+= new InputFile(bamFile) inputFiles :+= new InputFile(bamFile)
......
...@@ -32,7 +32,7 @@ class BamToChromSizesTest extends TestNGSuite with Matchers { ...@@ -32,7 +32,7 @@ class BamToChromSizesTest extends TestNGSuite with Matchers {
} }
@Test @Test
def testChromSizes: Unit = { def testChromSizes(): Unit = {
val bamFile = new File(resourcePath("/empty.bam")) val bamFile = new File(resourcePath("/empty.bam"))
val bamToChromSizes = new BamToChromSizes(null) val bamToChromSizes = new BamToChromSizes(null)
bamToChromSizes.bamFile = bamFile bamToChromSizes.bamFile = bamFile
......
...@@ -13,8 +13,8 @@ ...@@ -13,8 +13,8 @@
<tr><th>Version</th><td>${run.version}</td></tr> <tr><th>Version</th><td>${run.version}</td></tr>
<tr><th>Last commit hash</th><td>${run.commitHash}</td></tr> <tr><th>Last commit hash</th><td>${run.commitHash}</td></tr>
<tr><th>Output directory</th><td>${run.outputDir}</td></tr> <tr><th>Output directory</th><td>${run.outputDir}</td></tr>
<tr><th>Sample</th><td>${allSamples.filter(_.id == sampleId.get).headOption.map(_.name)}</td></tr> <tr><th>Sample</th><td>${allSamples.find(_.id == sampleId.get).map(_.name)}</td></tr>
<tr><th>Library</th><td>${allLibraries.filter(_.id == libId.get).headOption.map(_.name)}</td></tr> <tr><th>Library</th><td>${allLibraries.find(_.id == libId.get).map(_.name)}</td></tr>
</tbody> </tbody>
</table> </table>
<br/> <br/>
......
...@@ -48,7 +48,7 @@ class BamMetrics(val parent: Configurable) ...@@ -48,7 +48,7 @@ class BamMetrics(val parent: Configurable)
override def defaults = Map("bedtoolscoverage" -> Map("sorted" -> true)) override def defaults = Map("bedtoolscoverage" -> Map("sorted" -> true))
/** returns files to store in summary */ /** returns files to store in summary */
def summaryFiles = def summaryFiles: Map[String, File] =
Map("reference" -> referenceFasta(), "input_bam" -> inputBam) ++ Map("reference" -> referenceFasta(), "input_bam" -> inputBam) ++
ampliconBedFile.map("amplicon" -> _).toMap ++ ampliconBedFile.map("amplicon" -> _).toMap ++
ampliconBedFile.map(x => "roi_" + x.getName.stripSuffix(".bed") -> x).toMap ampliconBedFile.map(x => "roi_" + x.getName.stripSuffix(".bed") -> x).toMap
...@@ -58,7 +58,7 @@ class BamMetrics(val parent: Configurable) ...@@ -58,7 +58,7 @@ class BamMetrics(val parent: Configurable)
Map("amplicon_name" -> ampliconBedFile.collect { case x => x.getName.stripSuffix(".bed") }, Map("amplicon_name" -> ampliconBedFile.collect { case x => x.getName.stripSuffix(".bed") },
"roi_name" -> roiBedFiles.map(_.getName.stripSuffix(".bed"))) "roi_name" -> roiBedFiles.map(_.getName.stripSuffix(".bed")))
override def reportClass = { override def reportClass: Some[BammetricsReport] = {
val bammetricsReport = new BammetricsReport(this) val bammetricsReport = new BammetricsReport(this)
bammetricsReport.outputDir = new File(outputDir, "report") bammetricsReport.outputDir = new File(outputDir, "report")
bammetricsReport.summaryDbFile = summaryDbFile bammetricsReport.summaryDbFile = summaryDbFile
......
...@@ -487,7 +487,7 @@ object BammetricsReport extends ReportBuilder { ...@@ -487,7 +487,7 @@ object BammetricsReport extends ReportBuilder {
} }
def writeTableToTsv(tsvFile: File, table: Map[String, Array[Any]], firstColumn: String): Unit = { def writeTableToTsv(tsvFile: File, table: Map[String, Array[Any]], firstColumn: String): Unit = {
require(table.map(_._2.size).toList.distinct.size == 1, require(table.map(_._2.length).toList.distinct.size == 1,
"Not all values has the same number or rows") "Not all values has the same number or rows")
val keys = table.keys.filterNot(_ == firstColumn).toList.sorted val keys = table.keys.filterNot(_ == firstColumn).toList.sorted
val writer = new PrintWriter(tsvFile) val writer = new PrintWriter(tsvFile)
......
...@@ -44,7 +44,7 @@ class BamMetricsTest extends TestNGSuite with Matchers { ...@@ -44,7 +44,7 @@ class BamMetricsTest extends TestNGSuite with Matchers {
} }
@DataProvider(name = "bammetricsOptions") @DataProvider(name = "bammetricsOptions")
def bammetricsOptions = { def bammetricsOptions: Array[Array[AnyVal]] = {
val rois = Array(0, 1, 2, 3) val rois = Array(0, 1, 2, 3)
val bool = Array(true, false) val bool = Array(true, false)
...@@ -57,7 +57,7 @@ class BamMetricsTest extends TestNGSuite with Matchers { ...@@ -57,7 +57,7 @@ class BamMetricsTest extends TestNGSuite with Matchers {
private var dirs: List[File] = Nil private var dirs: List[File] = Nil
@Test(dataProvider = "bammetricsOptions") @Test(dataProvider = "bammetricsOptions")
def testBamMetrics(rois: Int, amplicon: Boolean, rna: Boolean, wgs: Boolean) = { def testBamMetrics(rois: Int, amplicon: Boolean, rna: Boolean, wgs: Boolean): Unit = {
val outputDir = Files.createTempDir() val outputDir = Files.createTempDir()
dirs :+= outputDir dirs :+= outputDir
val map = ConfigUtils.mergeMaps(Map("output_dir" -> outputDir, val map = ConfigUtils.mergeMaps(Map("output_dir" -> outputDir,
...@@ -74,8 +74,6 @@ class BamMetricsTest extends TestNGSuite with Matchers { ...@@ -74,8 +74,6 @@ class BamMetricsTest extends TestNGSuite with Matchers {
bammetrics.libId = Some("1") bammetrics.libId = Some("1")
bammetrics.script() bammetrics.script()
var regions: Int = rois + (if (amplicon) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[CollectRnaSeqMetrics]) shouldBe (if (rna) 1 else 0) bammetrics.functions.count(_.isInstanceOf[CollectRnaSeqMetrics]) shouldBe (if (rna) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[CollectWgsMetrics]) shouldBe (if (wgs) 1 else 0) bammetrics.functions.count(_.isInstanceOf[CollectWgsMetrics]) shouldBe (if (wgs) 1 else 0)
bammetrics.functions.count(_.isInstanceOf[CollectMultipleMetrics]) shouldBe 1 bammetrics.functions.count(_.isInstanceOf[CollectMultipleMetrics]) shouldBe 1
...@@ -86,13 +84,13 @@ class BamMetricsTest extends TestNGSuite with Matchers { ...@@ -86,13 +84,13 @@ class BamMetricsTest extends TestNGSuite with Matchers {
} }
// remove temporary run directory after all tests in the class have been run // remove temporary run directory after all tests in the class have been run
@AfterClass def removeTempOutputDir() = { @AfterClass def removeTempOutputDir(): Unit = {
dirs.foreach(FileUtils.deleteDirectory) dirs.foreach(FileUtils.deleteDirectory)
} }
} }
object BamMetricsTest { object BamMetricsTest {
val inputDir = Files.createTempDir() val inputDir: File = Files.createTempDir()
val bam = new File(inputDir, "bla.bam") val bam = new File(inputDir, "bla.bam")
Files.touch(bam) Files.touch(bam)
...@@ -100,7 +98,7 @@ object BamMetricsTest { ...@@ -100,7 +98,7 @@ object BamMetricsTest {
Files.touch(ampliconBed) Files.touch(ampliconBed)
def roi(i: Int): File = { def roi(i: Int): File = {
val roi = new File(inputDir, s"roi${i}.bed") val roi = new File(inputDir, s"roi$i.bed")
Files.touch(roi) Files.touch(roi)
roi roi
} }
......
...@@ -42,7 +42,7 @@ class Basty(val parent: Configurable) extends QScript with MultiSampleQScript { ...@@ -42,7 +42,7 @@ class Basty(val parent: Configurable) extends QScript with MultiSampleQScript {
def variantcallers = List("unifiedgenotyper") def variantcallers = List("unifiedgenotyper")
val numBoot = config("boot_runs", default = 100, namespace = "raxml").asInt val numBoot: Int = config("boot_runs", default = 100, namespace = "raxml").asInt
override def defaults = Map( override def defaults = Map(
"ploidy" -> 1, "ploidy" -> 1,
......
...@@ -48,7 +48,7 @@ class BastyTest extends TestNGSuite with Matchers { ...@@ -48,7 +48,7 @@ class BastyTest extends TestNGSuite with Matchers {
} }
@DataProvider(name = "bastyOptions") @DataProvider(name = "bastyOptions")
def bastyOptions = { def bastyOptions: Array[Array[Any]] = {
for (s1 <- sample1; s2 <- sample2) yield Array("", s1, s2) for (s1 <- sample1; s2 <- sample2) yield Array("", s1, s2)
} }
...@@ -114,7 +114,7 @@ class BastyTest extends TestNGSuite with Matchers { ...@@ -114,7 +114,7 @@ class BastyTest extends TestNGSuite with Matchers {
numberSamples numberSamples
else 0) else 0)
pipeline.functions.count(_.isInstanceOf[BaseRecalibrator]) shouldBe (if (dbsnp && baseRecalibration) pipeline.functions.count(_.isInstanceOf[BaseRecalibrator]) shouldBe (if (dbsnp && baseRecalibration)
(numberLibs * 2) numberLibs * 2
else 0) else 0)
pipeline.functions.count(_.isInstanceOf[PrintReads]) shouldBe (if (dbsnp && baseRecalibration) pipeline.functions.count(_.isInstanceOf[PrintReads]) shouldBe (if (dbsnp && baseRecalibration)
numberLibs numberLibs
...@@ -125,7 +125,7 @@ class BastyTest extends TestNGSuite with Matchers { ...@@ -125,7 +125,7 @@ class BastyTest extends TestNGSuite with Matchers {
pipeline.summaryFiles shouldBe Map() pipeline.summaryFiles shouldBe Map()
pipeline.samples foreach { pipeline.samples foreach {
case (sampleId, sample) => case (_, sample) =>
sample.summarySettings shouldBe Map() sample.summarySettings shouldBe Map()
sample.summaryFiles.get("variants_fasta") should not be None sample.summaryFiles.get("variants_fasta") should not be None
sample.summaryFiles.get("consensus_fasta") should not be None sample.summaryFiles.get("consensus_fasta") should not be None
...@@ -135,7 +135,7 @@ class BastyTest extends TestNGSuite with Matchers { ...@@ -135,7 +135,7 @@ class BastyTest extends TestNGSuite with Matchers {
sample.summaryFiles.get("snps_only_consensus_variants_fasta") should not be None sample.summaryFiles.get("snps_only_consensus_variants_fasta") should not be None
sample.summaryStats shouldBe Map() sample.summaryStats shouldBe Map()
sample.libraries.foreach { sample.libraries.foreach {
case (libId, lib) => case (_, lib) =>
lib.summarySettings shouldBe Map() lib.summarySettings shouldBe Map()
lib.summaryFiles shouldBe Map() lib.summaryFiles shouldBe Map()
lib.summaryStats shouldBe Map() lib.summaryStats shouldBe Map()
...@@ -154,14 +154,14 @@ class BastyTest extends TestNGSuite with Matchers { ...@@ -154,14 +154,14 @@ class BastyTest extends TestNGSuite with Matchers {
} }
// remove temporary run directory after all tests in the class have been run // remove temporary run directory after all tests in the class have been run
@AfterClass def removeTempOutputDir() = { @AfterClass def removeTempOutputDir(): Unit = {
dirs.foreach(FileUtils.deleteDirectory) dirs.foreach(FileUtils.deleteDirectory)
} }
} }
object BastyTest { object BastyTest {
def outputDir = Files.createTempDir() def outputDir: File = Files.createTempDir()
val inputDir = Files.createTempDir() val inputDir: File = Files.createTempDir()
def inputTouch(name: String): String = { def inputTouch(name: String): String = {
val file = new File(inputDir, name) val file = new File(inputDir, name)
......
...@@ -35,7 +35,7 @@ object BamUtils { ...@@ -35,7 +35,7 @@ object BamUtils {
* *
* @throws IllegalArgumentException * @throws IllegalArgumentException
* @param bamFiles input bam files * @param bamFiles input bam files
* @return * @return Map of sample bam files
*/ */
def sampleBamMap(bamFiles: List[File]): Map[String, File] = { def sampleBamMap(bamFiles: List[File]): Map[String, File] = {
val temp = bamFiles.map { file => val temp = bamFiles.map { file =>
...@@ -59,7 +59,7 @@ object BamUtils { ...@@ -59,7 +59,7 @@ object BamUtils {
* *
* @param inputBam input bam file * @param inputBam input bam file
* @param contig contig to scan for * @param contig contig to scan for
* @param end postion to stop scanning * @param end position to stop scanning
* @return Int with insertsize for this contig * @return Int with insertsize for this contig
*/ */
def contigInsertSize(inputBam: File, def contigInsertSize(inputBam: File,
...@@ -82,7 +82,7 @@ object BamUtils { ...@@ -82,7 +82,7 @@ object BamUtils {
val counts: mutable.Map[Int, Int] = mutable.Map() val counts: mutable.Map[Int, Int] = mutable.Map()
for (i <- 0 until samplingSize if samIterator.hasNext) { for (_ <- 0 until samplingSize if samIterator.hasNext) {
val rec = samIterator.next() val rec = samIterator.next()
val isPaired = rec.getReadPairedFlag val isPaired = rec.getReadPairedFlag
val minQ10 = rec.getMappingQuality >= 10 val minQ10 = rec.getMappingQuality >= 10
...@@ -97,27 +97,24 @@ object BamUtils { ...@@ -97,27 +97,24 @@ object BamUtils {
counts.keys.size match { counts.keys.size match {
case 1 => Some(counts.keys.head) case 1 => Some(counts.keys.head)
case 0 => None case 0 => None
case _ => { case _ =>
Some(counts.foldLeft(0)((old, observation) => { Some(counts.foldLeft(0)((old, observation) => {
observation match { observation match {
case (insertSize: Int, observations: Int) => { case (insertSize: Int, observations: Int) =>
(old + (insertSize * observations)) / (observations + 1) (old + (insertSize * observations)) / (observations + 1)
}
case _ => 0 case _ => 0
} }
})) }))
}
} }
}) })
insertSizesOnAllFragments.size match { insertSizesOnAllFragments.size match {
case 1 => Some(insertSizesOnAllFragments.head) case 1 => Some(insertSizesOnAllFragments.head)
case 0 => None case 0 => None
case _ => { case _ =>
Some(insertSizesOnAllFragments.foldLeft(0)((old, observation) => { Some(insertSizesOnAllFragments.foldLeft(0)((old, observation) => {
(old + observation) / 2 (old + observation) / 2
})) }))
}
} }
} }
...@@ -140,10 +137,10 @@ object BamUtils { ...@@ -140,10 +137,10 @@ object BamUtils {
binSize) binSize)
}) })
.toList .toList
val counts = bamInsertSizes.flatMap(x => x) val counts = bamInsertSizes.flatten
// avoid division by zero // avoid division by zero
if (counts.size != 0) counts.sum / counts.size if (counts.nonEmpty) counts.sum / counts.size
else 0 else 0
} }
...@@ -161,7 +158,7 @@ object BamUtils { ...@@ -161,7 +158,7 @@ object BamUtils {
}.toMap }.toMap
/** This class will add functionality to [[SAMSequenceDictionary]] */ /** This class will add functionality to [[SAMSequenceDictionary]] */
implicit class SamDictCheck(samDics: SAMSequenceDictionary) { implicit class SamDictCheck(samDicts: SAMSequenceDictionary) {
/** /**
* This method will check if all contig and sizes are the same without looking at the order of the contigs * This method will check if all contig and sizes are the same without looking at the order of the contigs
...@@ -172,15 +169,15 @@ object BamUtils { ...@@ -172,15 +169,15 @@ object BamUtils {
*/ */
def assertSameDictionary(that: SAMSequenceDictionary, ignoreOrder: Boolean): Unit = { def assertSameDictionary(that: SAMSequenceDictionary, ignoreOrder: Boolean): Unit = {
if (ignoreOrder) { if (ignoreOrder) {
assert(samDics.getReferenceLength == that.getReferenceLength) assert(samDicts.getReferenceLength == that.getReferenceLength)
val thisContigNames = val thisContigNames =
samDics.getSequences.map(x => (x.getSequenceName, x.getSequenceLength)).sorted.toSet samDicts.getSequences.map(x => (x.getSequenceName, x.getSequenceLength)).sorted.toSet
assert( assert(
thisContigNames == that.getSequences thisContigNames == that.getSequences
.map(x => (x.getSequenceName, x.getSequenceLength)) .map(x => (x.getSequenceName, x.getSequenceLength))
.sorted .sorted
.toSet) .toSet)
} else samDics.assertSameDictionary(that) } else samDicts.assertSameDictionary(that)
} }
} }
} }
...@@ -35,7 +35,7 @@ object ConfigUtils extends Logging { ...@@ -35,7 +35,7 @@ object ConfigUtils extends Logging {
* *
* @param map1 input map * @param map1 input map
* @param map2 input map * @param map2 input map
* @return Uniqe map1 * @return Unique map1
*/ */
def uniqueKeys(map1: Map[String, Any], map2: Map[String, Any]): Map[String, Any] = { def uniqueKeys(map1: Map[String, Any], map2: Map[String, Any]): Map[String, Any] = {
filterEmtpyMapValues( filterEmtpyMapValues(
...@@ -57,32 +57,34 @@ object ConfigUtils extends Logging { ...@@ -57,32 +57,34 @@ object ConfigUtils extends Logging {
*/ */
def filterEmtpyMapValues(map: Map[String, Any]): Map[String, Any] = { def filterEmtpyMapValues(map: Map[String, Any]): Map[String, Any] = {
map.filter { map.filter {
case (key, value: Map[_, _]) => value.nonEmpty case (_, value: Map[_, _]) => value.nonEmpty
case _ => true case _ => true
} }
} }
/** /**
* Merge 2 maps, when value is in a map in map1 and map2 the value calls recursively this function * Merge 2 maps, when value is in a map in map1 and map2 the value calls recursively this function
*
* @param map1 Takes priority over map2 * @param map1 Takes priority over map2
* @param map2 Backup for map1 * @param map2 Backup for map1
* @return merged map * @return merged map
*/ */
//noinspection ScalaUnnecessaryParentheses,ScalaUnnecessaryParentheses,ScalaUnnecessaryParentheses,ScalaUnnecessaryParentheses,ScalaUnnecessaryParentheses
def mergeMaps( def mergeMaps(
map1: Map[String, Any], map1: Map[String, Any],
map2: Map[String, Any], map2: Map[String, Any],
resolveConflict: (Any, Any, String) => Any = (m1, m2, key) => m1): Map[String, Any] = { resolveConflict: (Any, Any, String) => Any = (m1, _, _) => m1): Map[String, Any] = {
(for (key <- map1.keySet.++(map2.keySet)) yield { (for (key <- map1.keySet.++(map2.keySet)) yield {
if (!map2.contains(key)) (key -> map1(key)) if (!map2.contains(key)) key -> map1(key)
else if (!map1.contains(key)) (key -> map2(key)) else if (!map1.contains(key)) key -> map2(key)
else { else {
map1(key) match { map1(key) match {
case m1: Map[_, _] => case m1: Map[_, _] =>
map2(key) match { map2(key) match {
case m2: Map[_, _] => (key -> mergeMaps(any2map(m1), any2map(m2), resolveConflict)) case m2: Map[_, _] => key -> mergeMaps(any2map(m1), any2map(m2), resolveConflict)
case _ => (key -> map1(key)) case _ => key -> map1(key)
} }
case _ => (key -> resolveConflict(map1(key), map2(key), key)) case _ => key -> resolveConflict(map1(key), map2(key), key)
} }
} }
}).toMap }).toMap
...@@ -122,7 +124,7 @@ object ConfigUtils extends Logging { ...@@ -122,7 +124,7 @@ object ConfigUtils extends Logging {
} else Some(map) } else Some(map)
} }
/** Make json aboject from a file */ /** Make json object from a file */
def fileToJson(configFile: File): Json = { def fileToJson(configFile: File): Json = {
logger.debug("Jsonfile: " + configFile) logger.debug("Jsonfile: " + configFile)
val jsonText = scala.io.Source.fromFile(configFile).mkString val jsonText = scala.io.Source.fromFile(configFile).mkString
...@@ -172,15 +174,16 @@ object ConfigUtils extends Logging { ...@@ -172,15 +174,16 @@ object ConfigUtils extends Logging {
lazy val yaml = new Yaml() lazy val yaml = new Yaml()
def mapToYaml(map: Map[String, Any]) = yaml.dump(yaml.load(ConfigUtils.mapToJson(map).nospaces)) def mapToYaml(map: Map[String, Any]): JsonField =
yaml.dump(yaml.load(ConfigUtils.mapToJson(map).nospaces))
def mapToYamlFile(map: Map[String, Any], outputFile: File) = { def mapToYamlFile(map: Map[String, Any], outputFile: File): Unit = {
val writer = new PrintWriter(outputFile) val writer = new PrintWriter(outputFile)
writer.println(mapToYaml(map)) writer.println(mapToYaml(map))
writer.close() writer.close()
} }
def mapToJsonFile(map: Map[String, Any], outputFile: File) = { def mapToJsonFile(map: Map[String, Any], outputFile: File): Unit = {
val writer = new PrintWriter(outputFile) val writer = new PrintWriter(outputFile)
writer.println(anyToJson(map).toString()) writer.println(anyToJson(map).toString())
writer.close() writer.close()
...@@ -531,13 +534,13 @@ object ConfigUtils extends Logging { ...@@ -531,13 +534,13 @@ object ConfigUtils extends Logging {
/** Convert ConfigValue to List[Double] */ /** Convert ConfigValue to List[Double] */
implicit def configValue2doubleList(value: ConfigValue): List[Double] = { implicit def configValue2doubleList(value: ConfigValue): List[Double] = {
if (requiredValue(value)) any2list(value.value).map(any2double(_)) if (requiredValue(value)) any2list(value.value).map(any2double)
else Nil else Nil
} }
/** Convert ConfigValue to List[Int] */ /** Convert ConfigValue to List[Int] */
implicit def configValue2intList(value: ConfigValue): List[Int] = { implicit def configValue2intList(value: ConfigValue): List[Int] = {
if (requiredValue(value)) any2list(value.value).map(any2int(_)) if (requiredValue(value)) any2list(value.value).map(any2int)
else Nil else Nil
} }
......
...@@ -43,7 +43,7 @@ object FastaUtils { ...@@ -43,7 +43,7 @@ object FastaUtils {
private var dictCache: Map[File, SAMSequenceDictionary] = Map() private var dictCache: Map[File, SAMSequenceDictionary] = Map()
/** This will clear the dict cache */ /** This will clear the dict cache */
def clearCache() = { def clearCache(): Unit = {
dictCache = Map() dictCache = Map()
} }
...@@ -84,8 +84,8 @@ object FastaUtils { ...@@ -84,8 +84,8 @@ object FastaUtils {
.map { line => .map { line =>
val columns = line.split("\t") val columns = line.split("\t")
val refContig = columns(0) val refContig = columns(0)
val alterniveNames = columns(1).split(";").toSet val alternativeNames = columns(1).split(";").toSet
refContig -> alterniveNames refContig -> alternativeNames
} }
.toMap .toMap
reader.close() reader.close()
......
...@@ -72,12 +72,7 @@ object IoUtils { ...@@ -72,12 +72,7 @@ object IoUtils {
else fname else fname
} }
/** /** This returns the contents of a file as a List[String] */
* This returns the contents of a file as a List[String]
*
* @param file
* @return
*/
def getLinesFromFile(file: File): List[String] = { def getLinesFromFile(file: File): List[String] = {
val reader = Source.fromFile(file) val reader = Source.fromFile(file)
val lines = reader.getLines().toList val lines = reader.getLines().toList
...@@ -87,16 +82,15 @@ object IoUtils { ...@@ -87,16 +82,15 @@ object IoUtils {
def executableExist(exe: String): Boolean = { def executableExist(exe: String): Boolean = {
try { try {
val process = Process(Seq(exe)).run()
true true
} catch { } catch {
case e: IOException => false case _: IOException => false
} }
} }
def writeLinesToFile(output: File, lines: List[String]): Unit = { def writeLinesToFile(output: File, lines: List[String]): Unit = {
val writer = new PrintWriter(output) val writer = new PrintWriter(output)
lines.foreach(writer.println(_)) lines.foreach(writer.println)
writer.close() writer.close()
} }
} }
...@@ -19,12 +19,12 @@ package nl.lumc.sasc.biopet.utils ...@@ -19,12 +19,12 @@ package nl.lumc.sasc.biopet.utils
*/ */
class LazyCheck[T](function: => T) { class LazyCheck[T](function: => T) {
private var _isSet = false private var _isSet = false
def isSet = _isSet def isSet: Boolean = _isSet
lazy val value = { lazy val value: T = {
val cache = function val cache = function
_isSet = true _isSet = true
cache cache
} }
def apply() = value def apply(): T = value
def get = value def get: T = value
} }
...@@ -19,7 +19,7 @@ package nl.lumc.sasc.biopet.utils ...@@ -19,7 +19,7 @@ package nl.lumc.sasc.biopet.utils