Commit 33f56126 authored by Peter van 't Hof

Fix code style warnings

parent a3af8046
@@ -40,6 +40,6 @@ object SquishBed {
val squishBed = new SquishBed(root)
squishBed.input = input
squishBed.output = new File(outputDir, input.getName.stripSuffix(".bed") + ".squish.bed")
return squishBed
squishBed
}
}
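// A minimal, hypothetical sketch of the style rule applied above: a Scala method returns
// the value of its last expression, so the explicit `return` (and the warning it triggers)
// can simply be dropped. Object and method names here are invented for illustration.
object ReturnStyleExample {
  def squishedName(input: java.io.File): String =
    input.getName.stripSuffix(".bed") + ".squish.bed" // last expression is the result

  def main(args: Array[String]): Unit =
    println(squishedName(new java.io.File("regions.bed"))) // prints "regions.squish.bed"
}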
@@ -26,13 +26,15 @@ import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.io.Source
/**
* Created by pjvan_thof on 1/10/15.
*/
class AnnotateVcfWithBed {
// TODO: Queue wrapper
}
/**
* This is a tool to annotate a vcf file with values from a bed file
*
* Created by pjvan_thof on 1/10/15.
*/
object AnnotateVcfWithBed extends ToolCommand {
/**
@@ -52,30 +54,29 @@ object AnnotateVcfWithBed extends ToolCommand {
fieldType: String = "String") extends AbstractArgs
class OptParser extends AbstractOptParser {
opt[File]('I', "inputFile") required () unbounded () valueName ("<vcf file>") action { (x, c) =>
opt[File]('I', "inputFile") required () unbounded () valueName "<vcf file>" action { (x, c) =>
c.copy(inputFile = x)
} text ("Input is a required file property")
opt[File]('B', "bedFile") required () unbounded () valueName ("<bed file>") action { (x, c) =>
} text "Input is a required file property"
opt[File]('B', "bedFile") required () unbounded () valueName "<bed file>" action { (x, c) =>
c.copy(bedFile = x)
} text ("Bedfile is a required file property")
opt[File]('o', "output") required () unbounded () valueName ("<vcf file>") action { (x, c) =>
} text "Bedfile is a required file property"
opt[File]('o', "output") required () unbounded () valueName "<vcf file>" action { (x, c) =>
c.copy(outputFile = x)
} text ("out is a required file property")
opt[String]('f', "fieldName") required () unbounded () valueName ("<name of field in vcf file>") action { (x, c) =>
} text "out is a required file property"
opt[String]('f', "fieldName") required () unbounded () valueName "<name of field in vcf file>" action { (x, c) =>
c.copy(fieldName = x)
} text ("Name of info field in new vcf file")
opt[String]('d', "fieldDescription") unbounded () valueName ("<name of field in vcf file>") action { (x, c) =>
} text "Name of info field in new vcf file"
opt[String]('d', "fieldDescription") unbounded () valueName "<name of field in vcf file>" action { (x, c) =>
c.copy(fieldDescription = x)
} text ("Description of field in new vcf file")
opt[String]('t', "fieldType") unbounded () valueName ("<name of field in vcf file>") action { (x, c) =>
} text "Description of field in new vcf file"
opt[String]('t', "fieldType") unbounded () valueName "<name of field in vcf file>" action { (x, c) =>
c.copy(fieldType = x)
} text ("Description of field in new vcf file")
} text "Description of field in new vcf file"
}
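// A standalone sketch of the parenthesis-free builder style the commit switches to,
// assuming the parsers above are backed by scopt 3.x: single-argument calls such as
// valueName and text are written infix without parentheses, while empty-argument calls
// such as required () keep theirs. StyleArgs and "example-tool" are hypothetical names.
object ScoptStyleExample {
  case class StyleArgs(inputFile: java.io.File = null, fieldName: String = "")

  val parser = new scopt.OptionParser[StyleArgs]("example-tool") {
    opt[java.io.File]('I', "inputFile") required () unbounded () valueName "<vcf file>" action { (x, c) =>
      c.copy(inputFile = x)
    } text "Input is a required file property"
    opt[String]('f', "fieldName") required () action { (x, c) =>
      c.copy(fieldName = x)
    } text "Name of info field in new vcf file"
  }

  def main(args: Array[String]): Unit =
    parser.parse(args, StyleArgs()).foreach(cmdArgs => println(cmdArgs.fieldName))
}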
/**
* Program will Annotate a vcf file with the overlapping regions of a bed file, 4e column of the bed file we in a info tag in the vcf file
*
* @param args
* Program will annotate a vcf file with the overlapping regions of a bed file;
* the 4th column of the bed file is written to an info tag in the vcf file
*/
def main(args: Array[String]): Unit = {
@@ -108,7 +109,7 @@ object AnnotateVcfWithBed extends ToolCommand {
val values = line.split("\t")
if (values.size >= 4)
bedRecords(values(0)) = (values(1).toInt, values(2).toInt, values(3)) :: bedRecords.getOrElse(values(0), Nil)
else (values.size >= 3 && fieldType == VCFHeaderLineType.Flag)
else if (values.size >= 3 && fieldType == VCFHeaderLineType.Flag)
bedRecords(values(0)) = (values(1).toInt, values(2).toInt, "") :: bedRecords.getOrElse(values(0), Nil)
}
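// A small self-contained sketch of the grouping idiom used above: each bed line is split
// on tabs and prepended to a per-chromosome list held in a mutable Map, with getOrElse
// supplying Nil for chromosomes not seen yet. All names and data here are hypothetical.
object BedGroupingExample {
  import scala.collection.mutable

  def main(args: Array[String]): Unit = {
    val lines = List("chr1\t100\t200\tgeneA", "chr1\t300\t400\tgeneB", "chr2\t10\t20\tgeneC")
    val bedRecords: mutable.Map[String, List[(Int, Int, String)]] = mutable.Map()
    for (line <- lines) {
      val values = line.split("\t")
      if (values.size >= 4)
        bedRecords(values(0)) = (values(1).toInt, values(2).toInt, values(3)) :: bedRecords.getOrElse(values(0), Nil)
    }
    println(bedRecords("chr1")) // List((300,400,geneB), (100,200,geneA)) -- newest record first
  }
}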
@@ -148,8 +149,8 @@ object AnnotateVcfWithBed extends ToolCommand {
writer.add(builder.make)
}
}
reader.close
writer.close
reader.close()
writer.close()
logger.info("Done")
}
@@ -58,8 +58,8 @@ class BastyGenerateFasta(val root: Configurable) extends ToolCommandFuntion with
override val defaultCoreMemory = 4.0
override def beforeGraph: Unit = {
super.beforeGraph
override def beforeGraph(): Unit = {
super.beforeGraph()
reference = referenceFasta()
}
@@ -91,43 +91,43 @@ object BastyGenerateFasta extends ToolCommand {
reference: File = null) extends AbstractArgs
class OptParser extends AbstractOptParser {
opt[File]('V', "inputVcf") unbounded () valueName ("<file>") action { (x, c) =>
opt[File]('V', "inputVcf") unbounded () valueName "<file>" action { (x, c) =>
c.copy(inputVcf = x)
} text ("vcf file, needed for outputVariants and outputConsensusVariants") validate { x =>
} text "vcf file, needed for outputVariants and outputConsensusVariants" validate { x =>
if (x.exists) success else failure("File does not exist: " + x)
}
opt[File]("bamFile") unbounded () valueName ("<file>") action { (x, c) =>
opt[File]("bamFile") unbounded () valueName "<file>" action { (x, c) =>
c.copy(bamFile = x)
} text ("bam file, needed for outputConsensus and outputConsensusVariants") validate { x =>
} text "bam file, needed for outputConsensus and outputConsensusVariants" validate { x =>
if (x.exists) success else failure("File does not exist: " + x)
}
opt[File]("outputVariants") maxOccurs (1) unbounded () valueName ("<file>") action { (x, c) =>
opt[File]("outputVariants") maxOccurs 1 unbounded () valueName "<file>" action { (x, c) =>
c.copy(outputVariants = x)
} text ("fasta with only variants from vcf file")
opt[File]("outputConsensus") maxOccurs (1) unbounded () valueName ("<file>") action { (x, c) =>
} text "fasta with only variants from vcf file"
opt[File]("outputConsensus") maxOccurs 1 unbounded () valueName "<file>" action { (x, c) =>
c.copy(outputConsensus = x)
} text ("Consensus fasta from bam, always reference bases else 'N'")
opt[File]("outputConsensusVariants") maxOccurs (1) unbounded () valueName ("<file>") action { (x, c) =>
} text "Consensus fasta from bam, always reference bases else 'N'"
opt[File]("outputConsensusVariants") maxOccurs 1 unbounded () valueName "<file>" action { (x, c) =>
c.copy(outputConsensusVariants = x)
} text ("Consensus fasta from bam with variants from vcf file, always reference bases else 'N'")
} text "Consensus fasta from bam with variants from vcf file, always reference bases else 'N'"
opt[Unit]("snpsOnly") unbounded () action { (x, c) =>
c.copy(snpsOnly = true)
} text ("Only use snps from vcf file")
} text "Only use snps from vcf file"
opt[String]("sampleName") unbounded () action { (x, c) =>
c.copy(sampleName = x)
} text ("Sample name in vcf file")
} text "Sample name in vcf file"
opt[String]("outputName") required () unbounded () action { (x, c) =>
c.copy(outputName = x)
} text ("Output name in fasta file header")
} text "Output name in fasta file header"
opt[Int]("minAD") unbounded () action { (x, c) =>
c.copy(minAD = x)
} text ("min AD value in vcf file for sample. Defaults to: 8")
} text "min AD value in vcf file for sample. Defaults to: 8"
opt[Int]("minDepth") unbounded () action { (x, c) =>
c.copy(minDepth = x)
} text ("min depth in bam file. Defaults to: 8")
} text "min depth in bam file. Defaults to: 8"
opt[File]("reference") unbounded () action { (x, c) =>
c.copy(reference = x)
} text ("Indexed reference fasta file") validate { x =>
} text "Indexed reference fasta file" validate { x =>
if (x.exists) success else failure("File does not exist: " + x)
}
@@ -188,7 +188,7 @@ object BastyGenerateFasta extends ToolCommand {
val variants: Map[(Int, Int), VariantContext] = if (cmdArgs.inputVcf != null) {
val reader = new VCFFileReader(cmdArgs.inputVcf, true)
(for (variant <- reader.query(chrName, begin, end) if (!cmdArgs.snpsOnly || variant.isSNP)) yield {
(for (variant <- reader.query(chrName, begin, end) if !cmdArgs.snpsOnly || variant.isSNP) yield {
(variant.getStart, variant.getEnd) -> variant
}).toMap
} else Map()
@@ -202,10 +202,10 @@ object BastyGenerateFasta extends ToolCommand {
for (t <- s to e) coverage(t - begin) += 1
}
} else {
for (t <- 0 until coverage.length) coverage(t) = cmdArgs.minDepth
for (t <- coverage.indices) coverage(t) = cmdArgs.minDepth
}
val consensus = for (t <- 0 until coverage.length) yield {
val consensus = for (t <- coverage.indices) yield {
if (coverage(t) >= cmdArgs.minDepth) referenceSequence.getBases()(t).toChar
else 'N'
}
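// A minimal sketch of the consensus rule used above: every position covered by at least
// minDepth reads keeps its reference base, anything below that becomes 'N'. The
// coverage.indices call is the style fix replacing `0 until coverage.length`. The
// reference string and depth values below are made-up example data.
object ConsensusExample {
  def main(args: Array[String]): Unit = {
    val referenceBases = "ACGTACGT".getBytes
    val coverage = Array(10, 9, 2, 0, 8, 8, 1, 12)
    val minDepth = 8
    val consensus = for (t <- coverage.indices) yield {
      if (coverage(t) >= minDepth) referenceBases(t).toChar
      else 'N'
    }
    println(consensus.mkString) // ACNNACNT
  }
}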
@@ -222,7 +222,7 @@ object BastyGenerateFasta extends ToolCommand {
val stripSufix = if (variant.get._1._2 > end) variant.get._1._2 - end else 0
val allele = getMaxAllele(variant.get._2)
consensusPos += variant.get._2.getReference.getBases.length
buffer.append(allele.substring(stripPrefix, allele.size - stripSufix))
buffer.append(allele.substring(stripPrefix, allele.length - stripSufix))
} else {
buffer.append(consensus(consensusPos))
consensusPos += 1
@@ -230,7 +230,7 @@ object BastyGenerateFasta extends ToolCommand {
}
}
(chunk -> (consensus.mkString.toUpperCase, buffer.toString.toUpperCase))
chunk -> (consensus.mkString.toUpperCase, buffer.toString().toUpperCase)
}).toMap
if (cmdArgs.outputConsensus != null) {
val writer = new PrintWriter(cmdArgs.outputConsensus)
@@ -239,7 +239,7 @@ object BastyGenerateFasta extends ToolCommand {
writer.print(chunks(c)._1)
}
writer.println()
writer.close
writer.close()
}
if (cmdArgs.outputConsensusVariants != null) {
val writer = new PrintWriter(cmdArgs.outputConsensusVariants)
@@ -248,7 +248,7 @@ object BastyGenerateFasta extends ToolCommand {
writer.print(chunks(c)._2)
}
writer.println()
writer.close
writer.close()
}
}
}
@@ -257,12 +257,12 @@ object BastyGenerateFasta extends ToolCommand {
val writer = new PrintWriter(cmdArgs.outputVariants)
writer.println(">" + cmdArgs.outputName)
val vcfReader = new VCFFileReader(cmdArgs.inputVcf, false)
for (vcfRecord <- vcfReader if (!cmdArgs.snpsOnly || vcfRecord.isSNP)) yield {
for (vcfRecord <- vcfReader if !cmdArgs.snpsOnly || vcfRecord.isSNP) yield {
writer.print(getMaxAllele(vcfRecord))
}
writer.println()
writer.close
vcfReader.close
writer.close()
vcfReader.close()
}
protected def getMaxAllele(vcfRecord: VariantContext): String = {
@@ -276,6 +276,6 @@ object BastyGenerateFasta extends ToolCommand {
if (AD == null) return fillAllele("", maxSize)
val maxADid = AD.zipWithIndex.maxBy(_._1)._2
if (AD(maxADid) < cmdArgs.minAD) return fillAllele("", maxSize)
return fillAllele(vcfRecord.getAlleles()(maxADid).getBaseString, maxSize)
fillAllele(vcfRecord.getAlleles()(maxADid).getBaseString, maxSize)
}
}
\ No newline at end of file
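// A small sketch of the allele-selection idiom used in getMaxAllele above: zipWithIndex
// pairs each AD (allele depth) value with its position, maxBy(_._1) finds the pair with
// the highest depth, and ._2 recovers that allele's index. The depth values are example data.
object MaxAdExample {
  def main(args: Array[String]): Unit = {
    val AD = Array(3, 25, 7) // depths for the reference and two alternative alleles
    val maxADid = AD.zipWithIndex.maxBy(_._1)._2
    println(maxADid) // 1 -> the second allele has the deepest support
  }
}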
@@ -53,19 +53,19 @@ object BedToInterval extends ToolCommand {
bedToInterval.input = inputBed
bedToInterval.bamFile = inputBam
bedToInterval.output = output
return bedToInterval
bedToInterval
}
case class Args(inputFile: File = null, outputFile: File = null, bamFile: File = null) extends AbstractArgs
class OptParser extends AbstractOptParser {
opt[File]('I', "inputFile") required () valueName ("<file>") action { (x, c) =>
opt[File]('I', "inputFile") required () valueName "<file>" action { (x, c) =>
c.copy(inputFile = x)
}
opt[File]('o', "output") required () valueName ("<file>") action { (x, c) =>
opt[File]('o', "output") required () valueName "<file>" action { (x, c) =>
c.copy(outputFile = x)
}
opt[File]('b', "bam") required () valueName ("<file>") action { (x, c) =>
opt[File]('b', "bam") required () valueName "<file>" action { (x, c) =>
c.copy(bamFile = x)
}
}
@@ -85,12 +85,12 @@ object BedToInterval extends ToolCommand {
writer.write("@SQ\tSN:" + record.getSequenceName + "\tLN:" + record.getSequenceLength + "\n")
record.getSequenceName -> record.getSequenceLength
}
inputSam.close
inputSam.close()
val refsMap = Map(refs: _*)
val bedFile = Source.fromFile(commandArgs.inputFile)
for (
line <- bedFile.getLines;
line <- bedFile.getLines();
split = line.split("\t") if split.size >= 3;
chr = split(0);
start = split(1);
@@ -102,7 +102,7 @@ object BedToInterval extends ToolCommand {
else {
var strand = "+"
for (t <- 3 until split.length) {
if ((split(t) == "+" || split(t) == "-")) strand = split(t)
if (split(t) == "+" || split(t) == "-") strand = split(t)
}
writer.write(strand)
}
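// A tiny sketch of the strand detection above: columns beyond the third of a bed row are
// scanned and the last "+" or "-" found wins, defaulting to "+". The example row is invented.
object StrandExample {
  def main(args: Array[String]): Unit = {
    val split = "chr1\t100\t200\tgeneA\t0\t-".split("\t")
    var strand = "+"
    for (t <- 3 until split.length) {
      if (split(t) == "+" || split(t) == "-") strand = split(t)
    }
    println(strand) // "-"
  }
}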
@@ -21,8 +21,7 @@ import nl.lumc.sasc.biopet.core.config.Configurable
import nl.lumc.sasc.biopet.core.{ ToolCommand, ToolCommandFuntion }
import org.broadinstitute.gatk.utils.commandline.{ Input, Output }
import scala.collection.SortedMap
import scala.collection.mutable.Map
import scala.collection.{ mutable, SortedMap }
import scala.io.Source
class BedtoolsCoverageToCounts(val root: Configurable) extends ToolCommandFuntion {
@@ -45,10 +44,10 @@ object BedtoolsCoverageToCounts extends ToolCommand {
case class Args(input: File = null, output: File = null) extends AbstractArgs
class OptParser extends AbstractOptParser {
opt[File]('I', "input") required () valueName ("<file>") action { (x, c) =>
opt[File]('I', "input") required () valueName "<file>" action { (x, c) =>
c.copy(input = x)
}
opt[File]('o', "output") required () unbounded () valueName ("<file>") action { (x, c) =>
opt[File]('o', "output") required () unbounded () valueName "<file>" action { (x, c) =>
c.copy(output = x)
}
}
@@ -62,8 +61,8 @@ object BedtoolsCoverageToCounts extends ToolCommand {
if (!commandArgs.input.exists) throw new IllegalStateException("Input file not found, file: " + commandArgs.input)
val counts: Map[String, Long] = Map()
for (line <- Source.fromFile(commandArgs.input).getLines) {
val counts: mutable.Map[String, Long] = mutable.Map()
for (line <- Source.fromFile(commandArgs.input).getLines()) {
val values = line.split("\t")
val gene = values(3)
val count = values(6).toLong
@@ -77,6 +76,6 @@ object BedtoolsCoverageToCounts extends ToolCommand {
for ((seq, count) <- sortedCounts) {
if (count > 0) writer.println(seq + "\t" + count)
}
writer.close
writer.close()
}
}
\ No newline at end of file
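// A compact sketch of the counting pattern above: per-gene coverage counts are summed in a
// mutable.Map (the style fix imports scala.collection.mutable instead of shadowing the
// immutable Map), and non-zero counts are printed sorted by count. The input rows below are
// invented, bedtools-coverage-like lines with the gene in column 4 and the count in column 7.
object CountsExample {
  import scala.collection.mutable

  def main(args: Array[String]): Unit = {
    val lines = List("chr1\t1\t100\tgeneA\t0\t+\t15", "chr1\t200\t300\tgeneA\t0\t+\t5", "chr2\t1\t50\tgeneB\t0\t+\t7")
    val counts: mutable.Map[String, Long] = mutable.Map()
    for (line <- lines) {
      val values = line.split("\t")
      counts(values(3)) = counts.getOrElse(values(3), 0L) + values(6).toLong
    }
    for ((gene, count) <- counts.toSeq.sortBy(_._2).reverse if count > 0)
      println(gene + "\t" + count) // geneA 20, then geneB 7
  }
}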
@@ -25,6 +25,7 @@ import nl.lumc.sasc.biopet.utils.ConfigUtils
import org.broadinstitute.gatk.utils.commandline.{ Input, Output }
import scala.collection.JavaConversions._
import scala.collection.mutable
class BiopetFlagstat(val root: Configurable) extends ToolCommandFuntion with Summarizable {
javaMainClass = getClass.getName
@@ -63,13 +64,13 @@ object BiopetFlagstat extends ToolCommand {
case class Args(inputFile: File = null, summaryFile: Option[File] = None, region: Option[String] = None) extends AbstractArgs
class OptParser extends AbstractOptParser {
opt[File]('I', "inputFile") required () valueName ("<file>") action { (x, c) =>
opt[File]('I', "inputFile") required () valueName "<file>" action { (x, c) =>
c.copy(inputFile = x)
} text ("input bam file")
opt[File]('s', "summaryFile") valueName ("<file>") action { (x, c) =>
} text "input bam file"
opt[File]('s', "summaryFile") valueName "<file>" action { (x, c) =>
c.copy(summaryFile = Some(x))
} text ("summary output file")
opt[String]('r', "region") valueName ("<chr:start-stop>") action { (x, c) =>
} text "summary output file"
opt[String]('r', "region") valueName "<chr:start-stop>" action { (x, c) =>
c.copy(region = Some(x))
}
}
@@ -82,7 +83,7 @@ object BiopetFlagstat extends ToolCommand {
val commandArgs: Args = argsParser.parse(args, Args()) getOrElse sys.exit(1)
val inputSam = SamReaderFactory.makeDefault.open(commandArgs.inputFile)
val iterSam = if (commandArgs.region == None) inputSam.iterator else {
val iterSam = if (commandArgs.region.isEmpty) inputSam.iterator else {
val regionRegex = """(.*):(.*)-(.*)""".r
commandArgs.region.get match {
case regionRegex(chr, start, stop) => inputSam.query(chr, start.toInt, stop.toInt, false)
@@ -91,7 +92,7 @@ object BiopetFlagstat extends ToolCommand {
}
val flagstatCollector = new FlagstatCollector
flagstatCollector.loadDefaultFunctions
flagstatCollector.loadDefaultFunctions()
val m = 10
val max = 60
for (t <- 0 to (max / m))
@@ -144,11 +145,10 @@ object BiopetFlagstat extends ToolCommand {
}
commandArgs.summaryFile.foreach {
case file => {
case file =>
val writer = new PrintWriter(file)
writer.println(flagstatCollector.summary)
writer.close()
}
}
println(flagstatCollector.report)
@@ -157,12 +157,12 @@ object BiopetFlagstat extends ToolCommand {
class FlagstatCollector {
private var functionCount = 0
var readsCount = 0
private val names: Map[Int, String] = Map()
private val names: mutable.Map[Int, String] = mutable.Map()
private var functions: Array[SAMRecord => Boolean] = Array()
private var totalCounts: Array[Long] = Array()
private var crossCounts = Array.ofDim[Long](1, 1)
def loadDefaultFunctions {
def loadDefaultFunctions() {
addFunction("All", record => true)
addFunction("Mapped", record => !record.getReadUnmappedFlag)
addFunction("Duplicates", record => record.getDuplicateReadFlag)
@@ -207,7 +207,7 @@ object BiopetFlagstat extends ToolCommand {
crossCounts = Array.ofDim[Long](functionCount, functionCount)
totalCounts = new Array[Long](functionCount)
val temp = new Array[SAMRecord => Boolean](functionCount)
for (t <- 0 until (temp.size - 1)) temp(t) = functions(t)
for (t <- 0 until (temp.length - 1)) temp(t) = functions(t)
functions = temp
val index = functionCount - 1
@@ -228,7 +228,7 @@ object BiopetFlagstat extends ToolCommand {
buffer.append(crossReport() + "\n")
buffer.append(crossReport(fraction = true) + "\n")
return buffer.toString
buffer.toString()
}
def summary: String = {
@@ -236,7 +236,7 @@ object BiopetFlagstat extends ToolCommand {
names(t) -> totalCounts(t)
}).toMap
return ConfigUtils.mapToJson(map).spaces4
ConfigUtils.mapToJson(map).spaces4
}
def crossReport(fraction: Boolean = false): String = {
@@ -258,7 +258,7 @@ object BiopetFlagstat extends ToolCommand {
else buffer.append("\t")
}
}
return buffer.toString
buffer.toString()
}
} // End of class
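// A stripped-down sketch of the FlagstatCollector idea above: a list of named predicates is
// kept alongside an array of counters, and processing a record increments every counter
// whose predicate holds. SAMRecord is replaced by a plain Int here so the example stays
// self-contained; all names are hypothetical.
object PredicateCounterExample {
  import scala.collection.mutable

  private val names: mutable.Map[Int, String] = mutable.Map()
  private var functions: Array[Int => Boolean] = Array()
  private var totalCounts: Array[Long] = Array()

  def addFunction(name: String, f: Int => Boolean): Unit = {
    names(functions.length) = name
    functions = functions :+ f
    totalCounts = totalCounts :+ 0L
  }

  def processRecord(record: Int): Unit =
    for (t <- functions.indices if functions(t)(record)) totalCounts(t) += 1

  def main(args: Array[String]): Unit = {
    addFunction("All", _ => true)
    addFunction("Even", record => record % 2 == 0)
    List(1, 2, 3, 4).foreach(processRecord)
    for (t <- functions.indices) println(names(t) + "\t" + totalCounts(t)) // All 4, Even 2
  }
}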
@@ -46,19 +46,19 @@ object CheckAllelesVcfInBam extends ToolCommand {
bamFiles: List[File] = Nil, minMapQual: Int = 1) extends AbstractArgs
class OptParser extends AbstractOptParser {
opt[File]('I', "inputFile") required () maxOccurs (1) valueName ("<file>") action { (x, c) =>
opt[File]('I', "inputFile") required () maxOccurs 1 valueName "<file>" action { (x, c) =>
c.copy(inputFile = x)
}
opt[File]('o', "outputFile") required () maxOccurs (1) valueName ("<file>") action { (x, c) =>
opt[File]('o', "outputFile") required () maxOccurs 1 valueName "<file>" action { (x, c) =>
c.copy(outputFile = x)
}
opt[String]('s', "sample") unbounded () minOccurs (1) action { (x, c) =>
opt[String]('s', "sample") unbounded () minOccurs 1 action { (x, c) =>
c.copy(samples = x :: c.samples)
}
opt[File]('b', "bam") unbounded () minOccurs (1) action { (x, c) =>
opt[File]('b', "bam") unbounded () minOccurs 1 action { (x, c) =>
c.copy(bamFiles = x :: c.bamFiles)
}
opt[Int]('m', "min_mapping_quality") maxOccurs (1) action { (x, c) =>
opt[Int]('m', "min_mapping_quality") maxOccurs 1 action { (x, c) =>
c.copy(minMapQual = x)
}
}
@@ -98,7 +98,7 @@ object CheckAllelesVcfInBam extends ToolCommand {
val refAllele = vcfRecord.getReference.getBaseString
for ((sample, bamReader) <- bamReaders) {
val queryInterval = new QueryInterval(bamHeaders(sample).getSequenceIndex(vcfRecord.getChr),
vcfRecord.getStart, vcfRecord.getStart + refAllele.size - 1)
vcfRecord.getStart, vcfRecord.getStart + refAllele.length - 1)
val bamIter = bamReader.query(Array(queryInterval), false)
def filterRead(samRecord: SAMRecord): Boolean = {
@@ -112,7 +112,7 @@ object CheckAllelesVcfInBam extends ToolCommand {
countReports(sample).lowMapQualReads += 1
return true
}
return false
false
}
val counts = for (samRecord <- bamIter if !filterRead(samRecord)) {
@@ -122,7 +122,7 @@ object CheckAllelesVcfInBam extends ToolCommand {
case _ => countReports(sample).notFound += 1
}
}
bamIter.close
bamIter.close()
}
val builder = new VariantContextBuilder(vcfRecord)
@@ -137,49 +137,49 @@ object CheckAllelesVcfInBam extends ToolCommand {
}
writer.add(builder.make)
}
for ((_, r) <- bamReaders) r.close
reader.close
writer.close
for ((_, r) <- bamReaders) r.close()
reader.close()
writer.close()
}
def checkAlles(samRecord: SAMRecord, vcfRecord: VariantContext): Option[String] = {
val readStartPos = List.range(0, samRecord.getReadBases.length)
.find(x => samRecord.getReferencePositionAtReadPosition(x + 1) == vcfRecord.getStart) getOrElse { return None }
val readBases = samRecord.getReadBases()
val readBases = samRecord.getReadBases
val alleles = vcfRecord.getAlleles.map(x => x.getBaseString)
val refAllele = alleles.head
var maxSize = 1
for (allele <- alleles if allele.size > maxSize) maxSize = allele.size
for (allele <- alleles if allele.length > maxSize) maxSize = allele.length
val readC = for (t <- readStartPos until readStartPos + maxSize if t < readBases.length) yield readBases(t).toChar
val allelesInRead = mutable.Set(alleles.filter(readC.mkString.startsWith(_)): _*)
val allelesInRead = mutable.Set(alleles.filter(readC.mkString.startsWith): _*)
// Removal of insertions that are not really in the cigarstring
for (allele <- allelesInRead if allele.size > refAllele.size) {
val refPos = for (t <- refAllele.size until allele.size) yield samRecord.getReferencePositionAtReadPosition(readStartPos + t + 1)
for (allele <- allelesInRead if allele.length > refAllele.length) {
val refPos = for (t <- refAllele.length until allele.length) yield samRecord.getReferencePositionAtReadPosition(readStartPos + t + 1)
if (refPos.exists(_ > 0)) allelesInRead -= allele
}
// Removal of alleles that are not really in the cigarstring
for (allele <- allelesInRead) {
val readPosAfterAllele = samRecord.getReferencePositionAtReadPosition(readStartPos + allele.size + 1)
val vcfPosAfterAllele = vcfRecord.getStart + refAllele.size
val readPosAfterAllele = samRecord.getReferencePositionAtReadPosition(readStartPos + allele.length + 1)
val vcfPosAfterAllele = vcfRecord.getStart + refAllele.length
if (readPosAfterAllele != vcfPosAfterAllele &&
(refAllele.size != allele.size || (refAllele.size == allele.size && readPosAfterAllele < 0))) allelesInRead -= allele
(refAllele.length != allele.length || (refAllele.length == allele.length && readPosAfterAllele < 0))) allelesInRead -= allele
}
for (allele <- allelesInRead if allele.size >= refAllele.size) {
if (allelesInRead.exists(_.size > allele.size)) allelesInRead -= allele
for (allele <- allelesInRead if allele.length >= refAllele.length) {
if (allelesInRead.exists(_.length > allele.length)) allelesInRead -= allele
}
if (allelesInRead.contains(refAllele) && allelesInRead.exists(_.size < refAllele.size)) allelesInRead -= refAllele
if (allelesInRead.isEmpty) return None
else if (allelesInRead.size == 1) return Some(allelesInRead.head)
if (allelesInRead.contains(refAllele) && allelesInRead.exists(_.length < refAllele.length)) allelesInRead -= refAllele
if (allelesInRead.isEmpty) None
else if (allelesInRead.size == 1) Some(allelesInRead.head)
else {
logger.warn("vcfRecord: " + vcfRecord)