diff --git a/biopet-framework/src/main/scala/nl/lumc/sasc/biopet/tools/WipeReads.scala b/biopet-framework/src/main/scala/nl/lumc/sasc/biopet/tools/WipeReads.scala
index 4e3d118cea246117ec9f7724cda6a9b177334fdd..df84078c72a02891acad76bfecc3c18bdfe9565a 100644
--- a/biopet-framework/src/main/scala/nl/lumc/sasc/biopet/tools/WipeReads.scala
+++ b/biopet-framework/src/main/scala/nl/lumc/sasc/biopet/tools/WipeReads.scala
@@ -4,7 +4,7 @@
  */
 package nl.lumc.sasc.biopet.tools
 
-import java.io.{ File, IOException }
+import java.io.File
 
 import scala.collection.JavaConverters._
 
@@ -38,10 +38,10 @@ class WipeReads(val root: Configurable) extends BiopetJavaCommandLineFunction {
   javaMainClass = getClass.getName
 
   @Input(doc = "Input BAM file (must be indexed)", shortName = "I", required = true)
-  var inputBAM: File = _
+  var inputBam: File = _
 
   @Output(doc = "Output BAM", shortName = "o", required = true)
-  var outputBAM: File = _
+  var outputBam: File = _
 
 }
 
@@ -61,8 +61,10 @@ object WipeReads extends ToolCommand {
     else
       feat1.getStart <= feat2.getStart && feat1.getEnd >= feat2.getStart
 
-  /** Function to merge two overlapping intervals
-    * NOTE: we assume subtypes of Feature has a constructor with (chr, start, end) signature */
+  /**
+   * Function to merge two overlapping intervals
+   * NOTE: we assume subtypes of Feature have a constructor with a (chr, start, end) signature
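+   * For example, merging (chrQ, 100, 200) with (chrQ, 150, 300) yields (chrQ, 100, 300).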
+   */
   private def merge[T <: Feature](feat1: T, feat2: T): T = {
     if (feat1.getChr != feat2.getChr)
       throw new IllegalArgumentException("Can not merge features from different chromosomes")
@@ -72,7 +74,7 @@ object WipeReads extends ToolCommand {
     else if (overlaps(feat1, feat2))
       new BasicFeature(feat1.getChr, feat1.getStart, feat2.getEnd).asInstanceOf[T]
     else
-      throw new IllegalArgumentException("Can not merge non-overlapping RawInterval objects")
+      throw new IllegalArgumentException("Can not merge non-overlapping Feature objects")
   }
 
   /** Function to create an iterator yielding non-overlapped Feature objects */
@@ -80,11 +82,13 @@ object WipeReads extends ToolCommand {
     ri.toList
       .sortBy(x => (x.getChr, x.getStart, x.getEnd))
       .foldLeft(List.empty[T]) {
-      (acc, i) => acc match {
-        case head :: tail if overlaps(head, i)  => merge(head, i) :: tail
-        case _                                  => i :: acc
+        (acc, i) =>
+          acc match {
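+            // merge the current feature into the head interval when they overlap,
+            // otherwise prepend it as the start of a new non-overlapping interval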
+            case head :: tail if overlaps(head, i) => merge(head, i) :: tail
+            case _                                 => i :: acc
 
-      }}
+          }
+      }
       .toIterator
 
   /**
@@ -97,70 +101,60 @@ object WipeReads extends ToolCommand {
     logger.info("Parsing interval file ...")
 
     /** Function to create iterator from BED file */
-    def makeFeatureFromBED(inFile: File): Iterator[Feature] =
+    def makeFeatureFromBed(inFile: File): Iterator[Feature] =
       asScalaIteratorConverter(getFeatureReader(inFile.toPath.toString, new BEDCodec(), false).iterator).asScala
 
-    // TODO: implementation
     /** Function to create iterator from refFlat file */
     def makeFeatureFromRefFlat(inFile: File): Iterator[Feature] = ???
-    // convert coordinate to 1-based fully closed
-    // parse chrom, start blocks, end blocks, strands
+      // convert coordinate to 1-based fully closed
+      // parse chrom, start blocks, end blocks, strands
 
-    // TODO: implementation
     /** Function to create iterator from GTF file */
-    def makeFeatureFromGTF(inFile: File): Iterator[Feature] = ???
-    // convert coordinate to 1-based fully closed
-    // parse chrom, start blocks, end blocks, strands
+    def makeFeatureFromGtf(inFile: File): Iterator[Feature] = ???
+      // convert coordinate to 1-based fully closed
+      // parse chrom, start blocks, end blocks, strands
 
     // detect interval file format from extension
     val iterFunc: (File => Iterator[Feature]) =
       if (getExtension(inFile.toString.toLowerCase) == "bed")
-        makeFeatureFromBED
+        makeFeatureFromBed
       else
         throw new IllegalArgumentException("Unexpected interval file type: " + inFile.getPath)
 
     mergeOverlappingIntervals(iterFunc(inFile))
   }
 
-  def bloomParamsOk(bloomSize: Long, bloomFp: Double): Boolean = {
-    val optimalArraySize = FilterBuilder.optimalM(bloomSize, bloomFp)
-    // we are currently limited to maximum integer size
-    // if the optimal array size equals or exceeds it, we assume
-    // that it's a result of a truncation and return false
-    optimalArraySize <= Int.MaxValue
-  }
-
   // TODO: set minimum fraction for overlap
   /**
    * Function to create function to check SAMRecord for exclusion in filtered BAM file.
    *
    * The returned function evaluates all filtered-in SAMRecord to false.
    *
-   * @param iv iterator yielding RawInterval objects
-   * @param inBAM input BAM file
-   * @param inBAMIndex input BAM file index
+   * @param iv iterator yielding Feature objects
+   * @param inBam input BAM file
+   * @param inBamIndex input BAM file index
    * @param filterOutMulti whether to filter out reads with same name outside target region (default: true)
    * @param minMapQ minimum MapQ of reads in target region to filter out (default: 0)
-   * @param readGroupIDs read group IDs of reads in target region to filter out (default: all IDs)
+   * @param readGroupIds read group IDs of reads in target region to filter out (default: all IDs)
    * @param bloomSize expected size of elements to contain in the Bloom filter
    * @param bloomFp expected Bloom filter false positive rate
    * @return function that checks whether a SAMRecord or String is to be excluded
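+   *
+   * For example, `makeFilterOutFunction(iv, inBam, bloomSize = 70000000L, bloomFp = 4e-7)` yields a
+   * predicate that is true for reads overlapping the intervals in `iv` and, because filterOutMulti
+   * defaults to true, for any other read sharing a name with such a read.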
    */
   def makeFilterOutFunction(iv: Iterator[Feature],
-                            inBAM: File, inBAMIndex: File = null,
+                            inBam: File, inBamIndex: File = null,
                             filterOutMulti: Boolean = true,
-                            minMapQ: Int = 0, readGroupIDs: Set[String] = Set(),
+                            minMapQ: Int = 0, readGroupIds: Set[String] = Set(),
                             bloomSize: Long, bloomFp: Double): (SAMRecord => Boolean) = {
 
     logger.info("Building set of reads to exclude ...")
 
     // TODO: implement optional index creation
     /** Function to check for BAM file index and return a SAMFileReader given a File */
-    def prepIndexedInputBAM(): SAMFileReader =
-      if (inBAMIndex != null)
-        new SAMFileReader(inBAM, inBAMIndex)
+    def prepIndexedInputBam(): SAMFileReader =
+      if (inBamIndex != null)
+        new SAMFileReader(inBam, inBamIndex)
       else {
-        val sfr = new SAMFileReader(inBAM)
+        val sfr = new SAMFileReader(inBam)
         sfr.setValidationStringency(SAMFileReader.ValidationStringency.LENIENT)
         if (!sfr.hasIndex)
           throw new IllegalStateException("Input BAM file must be indexed")
@@ -174,27 +168,28 @@ object WipeReads extends ToolCommand {
      * The function still works when only either one of the interval or BAM
      * file contig is prepended with "chr"
      *
-     * @param inBAM BAM file to query as SAMFileReader
+     * @param inBam BAM file to query as SAMFileReader
      * @param feat feature object containing query
      * @return QueryInterval wrapped in Option
      */
-    def monadicMakeQueryInterval(inBAM: SAMFileReader, feat: Feature): Option[QueryInterval] =
-      if (inBAM.getFileHeader.getSequenceIndex(feat.getChr) > -1)
-        Some(inBAM.makeQueryInterval(feat.getChr, feat.getStart, feat.getEnd))
+    def monadicMakeQueryInterval(inBam: SAMFileReader, feat: Feature): Option[QueryInterval] =
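+      // try the contig name as-is first, then retry with the "chr" prefix stripped or prepended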
+      if (inBam.getFileHeader.getSequenceIndex(feat.getChr) > -1)
+        Some(inBam.makeQueryInterval(feat.getChr, feat.getStart, feat.getEnd))
       else if (feat.getChr.startsWith("chr")
-        && inBAM.getFileHeader.getSequenceIndex(feat.getChr.substring(3)) > -1)
-        Some(inBAM.makeQueryInterval(feat.getChr.substring(3), feat.getStart, feat.getEnd))
+        && inBam.getFileHeader.getSequenceIndex(feat.getChr.substring(3)) > -1)
+        Some(inBam.makeQueryInterval(feat.getChr.substring(3), feat.getStart, feat.getEnd))
       else if (!feat.getChr.startsWith("chr")
-        && inBAM.getFileHeader.getSequenceIndex("chr" + feat.getChr) > -1)
-        Some(inBAM.makeQueryInterval("chr" + feat.getChr, feat.getStart, feat.getEnd))
+        && inBam.getFileHeader.getSequenceIndex("chr" + feat.getChr) > -1)
+        Some(inBam.makeQueryInterval("chr" + feat.getChr, feat.getStart, feat.getEnd))
       else
         None
 
-    /** function to make IntervalTree from our RawInterval objects
-      *
-      * @param ifeat iterable over feature objects
-      * @return IntervalTree objects, filled with intervals from RawInterval
-      */
+    /**
+     * Function to make an IntervalTree from our Feature objects
+     *
+     * @param ifeat iterable over feature objects
+     * @return an IntervalTree filled with intervals from the given Feature objects
+     */
     def makeIntervalTree[T <: Feature](ifeat: Iterable[T]): IntervalTree = {
       val ivt = new IntervalTree
       for (iv <- ifeat)
@@ -228,39 +223,43 @@ object WipeReads extends ToolCommand {
 
     /** filter function for read IDs */
     val rgFilter =
-      if (readGroupIDs.size == 0)
+      if (readGroupIds.size == 0)
         (r: SAMRecord) => true
       else
-        (r: SAMRecord) => readGroupIDs.contains(r.getReadGroup.getReadGroupId)
+        (r: SAMRecord) => readGroupIds.contains(r.getReadGroup.getReadGroupId)
 
     /** function to get set element */
-    val SAMRecordElement =
+    val SamRecordElement =
       if (filterOutMulti)
         (r: SAMRecord) => r.getReadName
       else
         (r: SAMRecord) => r.getReadName + "_" + r.getAlignmentStart.toString
 
-    val SAMRecordMateElement =
-      (r: SAMRecord)   => r.getReadName + "_" + r.getMateAlignmentStart.toString
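+    /** function to get the mate's set element, built from the read name and the mate alignment start */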
+    val SamRecordMateElement =
+      (r: SAMRecord) => r.getReadName + "_" + r.getMateAlignmentStart.toString
 
-    val firstBAM = prepIndexedInputBAM()
+    val readyBam = prepIndexedInputBam()
 
     /* NOTE: the interval vector here should be bypass-able if we can make
              the BAM query intervals with Interval objects. This is not possible
-             at the moment since we can not retrieve star and end coordinates
-             of an Interval, so we resort to our own RawInterval vector
+             at the moment since we can not retrieve start and end coordinates
+             of an Interval, so we resort to our own Feature vector
     */
     lazy val intervals = iv.toVector
     lazy val intervalTreeMap: Map[String, IntervalTree] = intervals
       .groupBy(x => x.getChr)
       .map({ case (key, value) => (key, makeIntervalTree(value)) })
     lazy val queryIntervals = intervals
-      .flatMap(x => monadicMakeQueryInterval(firstBAM, x))
+      .flatMap(x => monadicMakeQueryInterval(readyBam, x))
       // makeQueryInterval only accepts a sorted QueryInterval list
       .sortBy(x => (x.referenceIndex, x.start, x.end))
       .toArray
 
-    val filteredRecords: Iterator[SAMRecord] = firstBAM.queryOverlapping(queryIntervals).asScala
+    val filteredRecords: Iterator[SAMRecord] = readyBam
+      // query BAM file with intervals
+      .queryOverlapping(queryIntervals)
+      // convert to a Scala iterator so the collection combinators below can be used
+      .asScala
       // ensure spliced reads have at least one block overlapping target region
       .filter(x => alignmentBlockOverlaps(x, intervalTreeMap))
       // filter for MAPQ on target region reads
@@ -275,11 +274,10 @@ object WipeReads extends ToolCommand {
     for (rec <- filteredRecords) {
       logger.debug("Adding read " + rec.getReadName + " to set ...")
       if ((!filterOutMulti) && rec.getReadPairedFlag) {
-        filteredOutSet.add(SAMRecordElement(rec))
-        filteredOutSet.add(SAMRecordMateElement(rec))
-      }
-      else
-        filteredOutSet.add(SAMRecordElement(rec))
+        filteredOutSet.add(SamRecordElement(rec))
+        filteredOutSet.add(SamRecordMateElement(rec))
+      } else
+        filteredOutSet.add(SamRecordElement(rec))
     }
 
     if (filterOutMulti)
@@ -287,40 +285,39 @@ object WipeReads extends ToolCommand {
     else
       (rec: SAMRecord) => {
         if (rec.getReadPairedFlag)
-          filteredOutSet.contains(SAMRecordElement(rec)) &&
-          filteredOutSet.contains(SAMRecordMateElement(rec))
+          filteredOutSet.contains(SamRecordElement(rec)) &&
+            filteredOutSet.contains(SamRecordMateElement(rec))
         else
-          filteredOutSet.contains(SAMRecordElement(rec))
+          filteredOutSet.contains(SamRecordElement(rec))
       }
   }
 
-  // TODO: implement stats file output
   /**
    * Function to filter input BAM and write its output to the filesystem
    *
    * @param filterFunc filter function that evaluates true for excluded SAMRecord
-   * @param inBAM input BAM file
-   * @param outBAM output BAM file
+   * @param inBam input BAM file
+   * @param outBam output BAM file
    * @param writeIndex whether to write index for output BAM file
    * @param async whether to write asynchronously
-   * @param filteredOutBAM whether to write excluded SAMRecords to their own BAM file
+   * @param filteredOutBam BAM file in which to write the excluded SAMRecords (default: none)
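+   *
+   * After filtering, a tab-delimited summary (input and output BAM names, included and excluded
+   * read counts) is printed to stdout.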
    */
-  def writeFilteredBAM(filterFunc: (SAMRecord => Boolean), inBAM: File, outBAM: File,
+  def writeFilteredBam(filterFunc: (SAMRecord => Boolean), inBam: File, outBam: File,
                        writeIndex: Boolean = true, async: Boolean = true,
-                       filteredOutBAM: File = null) = {
+                       filteredOutBam: File = null) = {
 
     val factory = new SAMFileWriterFactory()
       .setCreateIndex(writeIndex)
       .setUseAsyncIo(async)
 
-    val templateBAM = new SAMFileReader(inBAM)
-    templateBAM.setValidationStringency(SAMFileReader.ValidationStringency.LENIENT)
+    val templateBam = new SAMFileReader(inBam)
+    templateBam.setValidationStringency(SAMFileReader.ValidationStringency.LENIENT)
 
-    val targetBAM = factory.makeBAMWriter(templateBAM.getFileHeader, true, outBAM)
+    val targetBam = factory.makeBAMWriter(templateBam.getFileHeader, true, outBam)
 
-    val filteredBAM =
-      if (filteredOutBAM != null)
-        factory.makeBAMWriter(templateBAM.getFileHeader, true, filteredOutBAM)
+    val filteredBam =
+      if (filteredOutBam != null)
+        factory.makeBAMWriter(templateBam.getFileHeader, true, filteredOutBam)
       else
         null
 
@@ -328,35 +325,41 @@ object WipeReads extends ToolCommand {
 
     try {
       var (incl, excl) = (0, 0)
-      for (rec <- templateBAM.asScala) {
+      for (rec <- templateBam.asScala) {
         if (!filterFunc(rec)) {
-          targetBAM.addAlignment(rec)
+          targetBam.addAlignment(rec)
           incl += 1
-        }
-        else {
+        } else {
           excl += 1
-          if (filteredBAM != null) filteredBAM.addAlignment(rec)
+          if (filteredBam != null) filteredBam.addAlignment(rec)
         }
       }
       println(List("input_bam", "output_bam", "count_included", "count_excluded").mkString("\t"))
-      println(List(inBAM.getName, outBAM.getName, incl, excl).mkString("\t"))
+      println(List(inBam.getName, outBam.getName, incl, excl).mkString("\t"))
     } finally {
-      templateBAM.close()
-      targetBAM.close()
-      if (filteredBAM != null) filteredBAM.close()
+      templateBam.close()
+      targetBam.close()
+      if (filteredBam != null) filteredBam.close()
     }
   }
 
-  case class Args (inputBAM: File = null,
-                   targetRegions: File = null,
-                   outputBAM: File = null,
-                   filteredOutBAM: File = null,
-                   readGroupIDs: Set[String] = Set.empty[String],
-                   minMapQ: Int = 0,
-                   limitToRegion: Boolean = false,
-                   noMakeIndex: Boolean = false,
-                   bloomSize: Long = 70000000,
-                   bloomFp: Double = 4e-7) extends AbstractArgs
+  /**
+   * Function to check whether the Bloom filter can fulfill its size and false positive guarantees.
+   * As we are currently limited to the maximum integer size, an optimal array size that equals or
+   * exceeds it is assumed to be the result of a truncation, so we return false.
+   */
+  def bloomParamsOk(bloomSize: Long, bloomFp: Double): Boolean =
+    FilterBuilder.optimalM(bloomSize, bloomFp) <= Int.MaxValue
+
+  case class Args(inputBam: File = null,
+                  targetRegions: File = null,
+                  outputBam: File = null,
+                  filteredOutBam: File = null,
+                  readGroupIds: Set[String] = Set.empty[String],
+                  minMapQ: Int = 0,
+                  limitToRegion: Boolean = false,
+                  noMakeIndex: Boolean = false,
+                  bloomSize: Long = 70000000,
+                  bloomFp: Double = 4e-7) extends AbstractArgs
 
   class OptParser extends AbstractOptParser {
 
@@ -365,43 +368,53 @@ object WipeReads extends ToolCommand {
         |$commandName - Region-based reads removal from an indexed BAM file
       """.stripMargin)
 
-    opt[File]('I', "input_file") required() valueName "<bam>" action { (x, c) =>
-      c.copy(inputBAM = x) } validate {
-        x => if (x.exists) success else failure("Input BAM file not found")
-      } text "Input BAM file"
-
-    opt[File]('r', "interval_file") required() valueName "<bed>" action { (x, c) =>
-      c.copy(targetRegions = x) } validate {
-        x => if (x.exists) success else failure("Target regions file not found")
-      } text "Interval BED file"
-
-    opt[File]('o', "output_file") required() valueName "<bam>" action { (x, c) =>
-      c.copy(outputBAM = x) } text "Output BAM file"
-
-    opt[File]('f', "discarded_file") optional() valueName "<bam>" action { (x, c) =>
-      c.copy(filteredOutBAM = x) } text "Discarded reads BAM file (default: none)"
-
-    opt[Int]('Q', "min_mapq") optional() action { (x, c) =>
-      c.copy(minMapQ = x) } text "Minimum MAPQ of reads in target region to remove (default: 0)"
-
-    opt[String]('G', "read_group") unbounded() optional() valueName "<rgid>" action { (x, c) =>
-      c.copy(readGroupIDs = c.readGroupIDs + x) } text "Read group IDs to be removed (default: remove reads from all read groups)"
-
-    opt[Unit]("limit_removal") optional() action { (_, c) =>
-      c.copy(limitToRegion = true) } text
+    opt[File]('I', "input_file") required () valueName "<bam>" action { (x, c) =>
+      c.copy(inputBam = x)
+    } validate {
+      x => if (x.exists) success else failure("Input BAM file not found")
+    } text "Input BAM file"
+
+    opt[File]('r', "interval_file") required () valueName "<bed>" action { (x, c) =>
+      c.copy(targetRegions = x)
+    } validate {
+      x => if (x.exists) success else failure("Target regions file not found")
+    } text "Interval BED file"
+
+    opt[File]('o', "output_file") required () valueName "<bam>" action { (x, c) =>
+      c.copy(outputBam = x)
+    } text "Output BAM file"
+
+    opt[File]('f', "discarded_file") optional () valueName "<bam>" action { (x, c) =>
+      c.copy(filteredOutBam = x)
+    } text "Discarded reads BAM file (default: none)"
+
+    opt[Int]('Q', "min_mapq") optional () action { (x, c) =>
+      c.copy(minMapQ = x)
+    } text "Minimum MAPQ of reads in target region to remove (default: 0)"
+
+    opt[String]('G', "read_group") unbounded () optional () valueName "<rgid>" action { (x, c) =>
+      c.copy(readGroupIds = c.readGroupIds + x)
+    } text "Read group IDs to be removed (default: remove reads from all read groups)"
+
+    opt[Unit]("limit_removal") optional () action { (_, c) =>
+      c.copy(limitToRegion = true)
+    } text
       "Whether to remove multiple-mapped reads outside the target regions (default: yes)"
 
-    opt[Unit]("no_make_index") optional() action { (_, c) =>
-      c.copy(noMakeIndex = true) } text
+    opt[Unit]("no_make_index") optional () action { (_, c) =>
+      c.copy(noMakeIndex = true)
+    } text
       "Whether to index output BAM file or not (default: yes)"
 
     note("\nAdvanced options")
 
-    opt[Long]("bloom_size") optional() action { (x, c) =>
-      c.copy(bloomSize = x) } text "expected maximum number of reads in target regions (default: 7e7)"
+    opt[Long]("bloom_size") optional () action { (x, c) =>
+      c.copy(bloomSize = x)
+    } text "expected maximum number of reads in target regions (default: 7e7)"
 
-    opt[Double]("false_positive") optional() action { (x, c) =>
-      c.copy(bloomFp = x) } text "false positive rate (default: 4e-7)"
+    opt[Double]("false_positive") optional () action { (x, c) =>
+      c.copy(bloomFp = x)
+    } text "false positive rate (default: 4e-7)"
 
     note(
       """
@@ -410,7 +423,7 @@ object WipeReads extends ToolCommand {
         |the given ones, they will also be removed.
       """.stripMargin)
 
-    checkConfig { c=>
+    checkConfig { c =>
       if (!bloomParamsOk(c.bloomSize, c.bloomFp))
         failure("Bloom parameters combination exceed Int limitation")
       else
@@ -426,20 +439,20 @@ object WipeReads extends ToolCommand {
 
     val filterFunc = makeFilterOutFunction(
       iv = makeFeatureFromFile(commandArgs.targetRegions),
-      inBAM = commandArgs.inputBAM,
+      inBam = commandArgs.inputBam,
       filterOutMulti = !commandArgs.limitToRegion,
       minMapQ = commandArgs.minMapQ,
-      readGroupIDs = commandArgs.readGroupIDs,
+      readGroupIds = commandArgs.readGroupIds,
       bloomSize = commandArgs.bloomSize,
       bloomFp = commandArgs.bloomFp
     )
 
-    writeFilteredBAM(
+    writeFilteredBam(
       filterFunc,
-      commandArgs.inputBAM,
-      commandArgs.outputBAM,
+      commandArgs.inputBam,
+      commandArgs.outputBam,
       writeIndex = !commandArgs.noMakeIndex,
-      filteredOutBAM = commandArgs.filteredOutBAM
+      filteredOutBam = commandArgs.filteredOutBam
     )
   }
 }
diff --git a/biopet-framework/src/test/scala/nl/lumc/sasc/biopet/tools/WipeReadsUnitTest.scala b/biopet-framework/src/test/scala/nl/lumc/sasc/biopet/tools/WipeReadsUnitTest.scala
index ee438ae74e82bf157a013360daf9b20d5a1647b6..9639226aeccdc749a8fd9cb014f30210f69aafa7 100644
--- a/biopet-framework/src/test/scala/nl/lumc/sasc/biopet/tools/WipeReadsUnitTest.scala
+++ b/biopet-framework/src/test/scala/nl/lumc/sasc/biopet/tools/WipeReadsUnitTest.scala
@@ -6,6 +6,7 @@ package nl.lumc.sasc.biopet.tools
 
 import java.io.File
 import java.nio.file.Paths
+import scala.collection.JavaConverters._
 
 import htsjdk.samtools._
 import htsjdk.tribble._
@@ -13,17 +14,14 @@ import org.scalatest.Matchers
 import org.scalatest.testng.TestNGSuite
 import org.testng.annotations.Test
 
-import scala.collection.JavaConverters._
-
-
 class WipeReadsUnitTest extends TestNGSuite with Matchers {
 
-  import nl.lumc.sasc.biopet.tools.WipeReads._
+  import WipeReads._
 
   private def resourcePath(p: String): String =
     Paths.get(getClass.getResource(p).toURI).toString
 
-  private lazy val samP: SAMLineParser = {
+  private val samP: SAMLineParser = {
     val samh = new SAMFileHeader
     samh.addSequence(new SAMSequenceRecord("chrQ", 10000))
     samh.addReadGroup(new SAMReadGroupRecord("001"))
@@ -31,20 +29,20 @@ class WipeReadsUnitTest extends TestNGSuite with Matchers {
     new SAMLineParser(samh)
   }
 
-  private def makeSAMs(raws: String*): Seq[SAMRecord] =
+  private def makeSams(raws: String*): Seq[SAMRecord] =
     raws.map(s => samP.parseLine(s))
 
-  private def makeTempBAM(): File =
+  private def makeTempBam(): File =
     File.createTempFile("WipeReads", java.util.UUID.randomUUID.toString + ".bam")
 
-  private def makeTempBAMIndex(bam: File): File =
+  private def makeTempBamIndex(bam: File): File =
     new File(bam.getAbsolutePath.stripSuffix(".bam") + ".bai")
 
   val bloomSize: Long = 1000
   val bloomFp: Double = 1e-10
 
-  val sBAMFile1 = new File(resourcePath("/single01.bam"))
-  val sBAMRecs1 = makeSAMs(
+  val sBamFile1 = new File(resourcePath("/single01.bam"))
+  val sBamRecs1 = makeSams(
     "r02\t0\tchrQ\t50\t60\t10M\t*\t0\t0\tTACGTACGTA\tEEFFGGHHII\tRG:Z:001",
     "r01\t16\tchrQ\t190\t60\t10M\t*\t0\t0\tTACGTACGTA\tEEFFGGHHII\tRG:Z:001",
     "r01\t16\tchrQ\t290\t60\t10M\t*\t0\t0\tGGGGGAAAAA\tGGGGGGGGGG\tRG:Z:001",
@@ -54,8 +52,8 @@ class WipeReadsUnitTest extends TestNGSuite with Matchers {
     "r06\t4\t*\t0\t0\t*\t*\t0\t0\tATATATATAT\tHIHIHIHIHI\tRG:Z:001"
   )
 
-  val sBAMFile2 = new File(resourcePath("/single02.bam"))
-  val sBAMRecs2 = makeSAMs(
+  val sBamFile2 = new File(resourcePath("/single02.bam"))
+  val sBamRecs2 = makeSams(
     "r02\t0\tchrQ\t50\t60\t10M\t*\t0\t0\tTACGTACGTA\tEEFFGGHHII\tRG:Z:001",
     "r01\t16\tchrQ\t190\t30\t10M\t*\t0\t0\tGGGGGAAAAA\tGGGGGGGGGG\tRG:Z:002",
     "r01\t16\tchrQ\t290\t30\t10M\t*\t0\t0\tGGGGGAAAAA\tGGGGGGGGGG\tRG:Z:002",
@@ -66,11 +64,11 @@ class WipeReadsUnitTest extends TestNGSuite with Matchers {
     "r08\t4\t*\t0\t0\t*\t*\t0\t0\tATATATATAT\tHIHIHIHIHI\tRG:Z:002"
   )
 
-  val sBAMFile3 = new File(resourcePath("/single03.bam"))
-  val sBAMFile4 = new File(resourcePath("/single04.bam"))
+  val sBamFile3 = new File(resourcePath("/single03.bam"))
+  val sBamFile4 = new File(resourcePath("/single04.bam"))
 
-  val pBAMFile1 = new File(resourcePath("/paired01.bam"))
-  val pBAMRecs1 = makeSAMs(
+  val pBamFile1 = new File(resourcePath("/paired01.bam"))
+  val pBamRecs1 = makeSams(
     "r02\t99\tchrQ\t50\t60\t10M\t=\t90\t50\tTACGTACGTA\tEEFFGGHHII\tRG:Z:001",
     "r02\t147\tchrQ\t90\t60\t10M\t=\t50\t-50\tATGCATGCAT\tEEFFGGHHII\tRG:Z:001",
     "r01\t163\tchrQ\t150\t60\t10M\t=\t190\t50\tAAAAAGGGGG\tGGGGGGGGGG\tRG:Z:001",
@@ -87,8 +85,8 @@ class WipeReadsUnitTest extends TestNGSuite with Matchers {
     "r06\t4\t*\t0\t0\t*\t*\t0\t0\tGCGCGCGCGC\tHIHIHIHIHI\tRG:Z:001"
   )
 
-  val pBAMFile2 = new File(resourcePath("/paired02.bam"))
-  val pBAMRecs2 = makeSAMs(
+  val pBamFile2 = new File(resourcePath("/paired02.bam"))
+  val pBamRecs2 = makeSams(
     "r02\t99\tchrQ\t50\t60\t10M\t=\t90\t50\tTACGTACGTA\tEEFFGGHHII\tRG:Z:001",
     "r02\t147\tchrQ\t90\t60\t10M\t=\t50\t-50\tATGCATGCAT\tEEFFGGHHII\tRG:Z:001",
     "r01\t163\tchrQ\t150\t30\t10M\t=\t190\t50\tAAAAAGGGGG\tGGGGGGGGGG\tRG:Z:002",
@@ -101,22 +99,22 @@ class WipeReadsUnitTest extends TestNGSuite with Matchers {
     "r08\t4\t*\t0\t0\t*\t*\t0\t0\tGCGCGCGCGC\tHIHIHIHIHI\tRG:Z:002"
   )
 
-  val pBAMFile3 = new File(resourcePath("/paired03.bam"))
-  val BEDFile1 = new File(resourcePath("/rrna01.bed"))
-  val minArgList = List("-I", sBAMFile1.toString, "-l", BEDFile1.toString, "-o", "mock.bam")
-
-  @Test def testMakeFeatureFromBED() = {
-    val intervals: Vector[Feature] = makeFeatureFromFile(BEDFile1).toVector
-    intervals.length should be (3)
-    intervals.head.getChr should === ("chrQ")
-    intervals.head.getStart should be (991)
-    intervals.head.getEnd should be (1000)
-    intervals.last.getChr should === ("chrQ")
-    intervals.last.getStart should be (291)
-    intervals.last.getEnd should be (320)
+  val pBamFile3 = new File(resourcePath("/paired03.bam"))
+  val BedFile1 = new File(resourcePath("/rrna01.bed"))
+  val minArgList = List("-I", sBamFile1.toString, "-r", BedFile1.toString, "-o", "mock.bam")
+
+  @Test def testMakeFeatureFromBed() = {
+    val intervals: Vector[Feature] = makeFeatureFromFile(BedFile1).toVector
+    intervals.length should be(3)
+    intervals.head.getChr should ===("chrQ")
+    intervals.head.getStart should be(991)
+    intervals.head.getEnd should be(1000)
+    intervals.last.getChr should ===("chrQ")
+    intervals.last.getStart should be(291)
+    intervals.last.getEnd should be(320)
   }
 
-  @Test def testSingleBAMDefault() = {
+  @Test def testSingleBamDefault() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 291, 320), // overlaps r01, second hit,
       new BasicFeature("chrQ", 451, 480), // overlaps r04
@@ -125,317 +123,317 @@ class WipeReadsUnitTest extends TestNGSuite with Matchers {
     // NOTE: while it's possible to have our filter produce false positives
     //       it is highly unlikely in our test cases as we are setting a very low FP rate
     //       and only filling the filter with a few items
-    val isFilteredOut = makeFilterOutFunction(intervals, sBAMFile1, bloomSize = bloomSize, bloomFp = bloomFp)
+    val isFilteredOut = makeFilterOutFunction(intervals, sBamFile1, bloomSize = bloomSize, bloomFp = bloomFp)
     // by default, set elements are SAM record read names
-    assert(!isFilteredOut(sBAMRecs1(0)))
-    assert(isFilteredOut(sBAMRecs1(1)))
-    assert(isFilteredOut(sBAMRecs1(2)))
-    assert(isFilteredOut(sBAMRecs1(3)))
-    assert(!isFilteredOut(sBAMRecs1(4)))
-    assert(!isFilteredOut(sBAMRecs1(5)))
-    assert(!isFilteredOut(sBAMRecs1(6)))
+    assert(!isFilteredOut(sBamRecs1(0)))
+    assert(isFilteredOut(sBamRecs1(1)))
+    assert(isFilteredOut(sBamRecs1(2)))
+    assert(isFilteredOut(sBamRecs1(3)))
+    assert(!isFilteredOut(sBamRecs1(4)))
+    assert(!isFilteredOut(sBamRecs1(5)))
+    assert(!isFilteredOut(sBamRecs1(6)))
   }
 
-  @Test def testSingleBAMIntervalWithoutChr() = {
+  @Test def testSingleBamIntervalWithoutChr() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("Q", 291, 320),
       new BasicFeature("chrQ", 451, 480),
       new BasicFeature("P", 191, 480)
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, sBAMFile1, bloomSize = bloomSize, bloomFp = bloomFp)
-    assert(!isFilteredOut(sBAMRecs1(0)))
-    assert(isFilteredOut(sBAMRecs1(1)))
-    assert(isFilteredOut(sBAMRecs1(2)))
-    assert(isFilteredOut(sBAMRecs1(3)))
-    assert(!isFilteredOut(sBAMRecs1(4)))
-    assert(!isFilteredOut(sBAMRecs1(5)))
-    assert(!isFilteredOut(sBAMRecs1(6)))
+    val isFilteredOut = makeFilterOutFunction(intervals, sBamFile1, bloomSize = bloomSize, bloomFp = bloomFp)
+    assert(!isFilteredOut(sBamRecs1(0)))
+    assert(isFilteredOut(sBamRecs1(1)))
+    assert(isFilteredOut(sBamRecs1(2)))
+    assert(isFilteredOut(sBamRecs1(3)))
+    assert(!isFilteredOut(sBamRecs1(4)))
+    assert(!isFilteredOut(sBamRecs1(5)))
+    assert(!isFilteredOut(sBamRecs1(6)))
   }
 
-  @Test def testSingleBAMDefaultPartialExonOverlap() = {
+  @Test def testSingleBamDefaultPartialExonOverlap() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 881, 1000) // overlaps first exon of r05
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, sBAMFile1, bloomSize = bloomSize, bloomFp = bloomFp)
-    assert(!isFilteredOut(sBAMRecs1(0)))
-    assert(!isFilteredOut(sBAMRecs1(1)))
-    assert(!isFilteredOut(sBAMRecs1(2)))
-    assert(!isFilteredOut(sBAMRecs1(3)))
-    assert(!isFilteredOut(sBAMRecs1(4)))
-    assert(isFilteredOut(sBAMRecs1(5)))
-    assert(!isFilteredOut(sBAMRecs1(6)))
+    val isFilteredOut = makeFilterOutFunction(intervals, sBamFile1, bloomSize = bloomSize, bloomFp = bloomFp)
+    assert(!isFilteredOut(sBamRecs1(0)))
+    assert(!isFilteredOut(sBamRecs1(1)))
+    assert(!isFilteredOut(sBamRecs1(2)))
+    assert(!isFilteredOut(sBamRecs1(3)))
+    assert(!isFilteredOut(sBamRecs1(4)))
+    assert(isFilteredOut(sBamRecs1(5)))
+    assert(!isFilteredOut(sBamRecs1(6)))
   }
 
-  @Test def testSingleBAMDefaultNoExonOverlap() = {
+  @Test def testSingleBamDefaultNoExonOverlap() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrP", 881, 1000),
       new BasicFeature("chrQ", 900, 920)
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, sBAMFile1, bloomSize = bloomSize, bloomFp = bloomFp)
-    assert(!isFilteredOut(sBAMRecs1(0)))
-    assert(!isFilteredOut(sBAMRecs1(1)))
-    assert(!isFilteredOut(sBAMRecs1(2)))
-    assert(!isFilteredOut(sBAMRecs1(3)))
-    assert(!isFilteredOut(sBAMRecs1(4)))
-    assert(!isFilteredOut(sBAMRecs1(5)))
-    assert(!isFilteredOut(sBAMRecs1(5)))
-    assert(!isFilteredOut(sBAMRecs1(6)))
+    val isFilteredOut = makeFilterOutFunction(intervals, sBamFile1, bloomSize = bloomSize, bloomFp = bloomFp)
+    assert(!isFilteredOut(sBamRecs1(0)))
+    assert(!isFilteredOut(sBamRecs1(1)))
+    assert(!isFilteredOut(sBamRecs1(2)))
+    assert(!isFilteredOut(sBamRecs1(3)))
+    assert(!isFilteredOut(sBamRecs1(4)))
+    assert(!isFilteredOut(sBamRecs1(5)))
+    assert(!isFilteredOut(sBamRecs1(5)))
+    assert(!isFilteredOut(sBamRecs1(6)))
   }
 
-  @Test def testSingleBAMFilterOutMultiNotSet() = {
+  @Test def testSingleBamFilterOutMultiNotSet() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 291, 320), // overlaps r01, second hit,
       new BasicFeature("chrQ", 451, 480), // overlaps r04
       new BasicFeature("chrQ", 991, 1000) // overlaps nothing; lies in the spliced region of r05
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, sBAMFile1, bloomSize = bloomSize, bloomFp = bloomFp,
+    val isFilteredOut = makeFilterOutFunction(intervals, sBamFile1, bloomSize = bloomSize, bloomFp = bloomFp,
       filterOutMulti = false)
-    assert(!isFilteredOut(sBAMRecs1(0)))
-    assert(!isFilteredOut(sBAMRecs1(1)))
-    assert(isFilteredOut(sBAMRecs1(2)))
-    assert(isFilteredOut(sBAMRecs1(3)))
-    assert(!isFilteredOut(sBAMRecs1(4)))
-    assert(!isFilteredOut(sBAMRecs1(5)))
-    assert(!isFilteredOut(sBAMRecs1(6)))
+    assert(!isFilteredOut(sBamRecs1(0)))
+    assert(!isFilteredOut(sBamRecs1(1)))
+    assert(isFilteredOut(sBamRecs1(2)))
+    assert(isFilteredOut(sBamRecs1(3)))
+    assert(!isFilteredOut(sBamRecs1(4)))
+    assert(!isFilteredOut(sBamRecs1(5)))
+    assert(!isFilteredOut(sBamRecs1(6)))
   }
 
-  @Test def testSingleBAMFilterMinMapQ() = {
+  @Test def testSingleBamFilterMinMapQ() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 291, 320),
       new BasicFeature("chrQ", 451, 480)
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, sBAMFile2, bloomSize = bloomSize, bloomFp = bloomFp,
+    val isFilteredOut = makeFilterOutFunction(intervals, sBamFile2, bloomSize = bloomSize, bloomFp = bloomFp,
       minMapQ = 60)
-    assert(!isFilteredOut(sBAMRecs2(0)))
+    assert(!isFilteredOut(sBamRecs2(0)))
     // r01 is not in since it is below the MAPQ threshold
-    assert(!isFilteredOut(sBAMRecs2(1)))
-    assert(!isFilteredOut(sBAMRecs2(2)))
-    assert(isFilteredOut(sBAMRecs2(3)))
-    assert(isFilteredOut(sBAMRecs2(4)))
-    assert(isFilteredOut(sBAMRecs2(5)))
-    assert(!isFilteredOut(sBAMRecs2(6)))
-    assert(!isFilteredOut(sBAMRecs2(7)))
+    assert(!isFilteredOut(sBamRecs2(1)))
+    assert(!isFilteredOut(sBamRecs2(2)))
+    assert(isFilteredOut(sBamRecs2(3)))
+    assert(isFilteredOut(sBamRecs2(4)))
+    assert(isFilteredOut(sBamRecs2(5)))
+    assert(!isFilteredOut(sBamRecs2(6)))
+    assert(!isFilteredOut(sBamRecs2(7)))
   }
 
-  @Test def testSingleBAMFilterMinMapQFilterOutMultiNotSet() = {
+  @Test def testSingleBamFilterMinMapQFilterOutMultiNotSet() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 291, 320),
       new BasicFeature("chrQ", 451, 480)
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, sBAMFile2, bloomSize = bloomSize, bloomFp = bloomFp,
+    val isFilteredOut = makeFilterOutFunction(intervals, sBamFile2, bloomSize = bloomSize, bloomFp = bloomFp,
       minMapQ = 60, filterOutMulti = false)
-    assert(!isFilteredOut(sBAMRecs2(0)))
-    assert(!isFilteredOut(sBAMRecs2(1)))
+    assert(!isFilteredOut(sBamRecs2(0)))
+    assert(!isFilteredOut(sBamRecs2(1)))
     // this r01 is not in since it is below the MAPQ threshold
-    assert(!isFilteredOut(sBAMRecs2(2)))
-    assert(isFilteredOut(sBAMRecs2(3)))
-    assert(isFilteredOut(sBAMRecs2(4)))
+    assert(!isFilteredOut(sBamRecs2(2)))
+    assert(isFilteredOut(sBamRecs2(3)))
+    assert(isFilteredOut(sBamRecs2(4)))
     // this r07 is not in since filterOuMulti is false
-    assert(!isFilteredOut(sBAMRecs2(5)))
-    assert(!isFilteredOut(sBAMRecs2(6)))
-    assert(!isFilteredOut(sBAMRecs2(7)))
+    assert(!isFilteredOut(sBamRecs2(5)))
+    assert(!isFilteredOut(sBamRecs2(6)))
+    assert(!isFilteredOut(sBamRecs2(7)))
   }
 
-  @Test def testSingleBAMFilterReadGroupIDs() = {
+  @Test def testSingleBamFilterReadGroupIds() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 291, 320),
       new BasicFeature("chrQ", 451, 480)
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, sBAMFile2, bloomSize = bloomSize, bloomFp = bloomFp,
-      readGroupIDs = Set("002", "003"))
-    assert(!isFilteredOut(sBAMRecs2(0)))
+    val isFilteredOut = makeFilterOutFunction(intervals, sBamFile2, bloomSize = bloomSize, bloomFp = bloomFp,
+      readGroupIds = Set("002", "003"))
+    assert(!isFilteredOut(sBamRecs2(0)))
     // only r01 is in the set since it is RG 002
-    assert(isFilteredOut(sBAMRecs2(1)))
-    assert(isFilteredOut(sBAMRecs2(2)))
-    assert(!isFilteredOut(sBAMRecs2(3)))
-    assert(!isFilteredOut(sBAMRecs2(4)))
-    assert(!isFilteredOut(sBAMRecs2(5)))
-    assert(!isFilteredOut(sBAMRecs2(6)))
-    assert(!isFilteredOut(sBAMRecs2(7)))
+    assert(isFilteredOut(sBamRecs2(1)))
+    assert(isFilteredOut(sBamRecs2(2)))
+    assert(!isFilteredOut(sBamRecs2(3)))
+    assert(!isFilteredOut(sBamRecs2(4)))
+    assert(!isFilteredOut(sBamRecs2(5)))
+    assert(!isFilteredOut(sBamRecs2(6)))
+    assert(!isFilteredOut(sBamRecs2(7)))
   }
 
-  @Test def testPairBAMDefault() = {
+  @Test def testPairBamDefault() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 291, 320), // overlaps r01, second hit,
       new BasicFeature("chrQ", 451, 480), // overlaps r04
       new BasicFeature("chrQ", 991, 1000) // overlaps nothing; lies in the spliced region of r05
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, pBAMFile1, bloomSize = bloomSize, bloomFp = bloomFp)
-    assert(!isFilteredOut(pBAMRecs1(0)))
-    assert(!isFilteredOut(pBAMRecs1(1)))
-    assert(isFilteredOut(pBAMRecs1(2)))
-    assert(isFilteredOut(pBAMRecs1(3)))
-    assert(isFilteredOut(pBAMRecs1(4)))
-    assert(isFilteredOut(pBAMRecs1(5)))
-    assert(isFilteredOut(pBAMRecs1(6)))
-    assert(isFilteredOut(pBAMRecs1(7)))
-    assert(!isFilteredOut(pBAMRecs1(8)))
-    assert(!isFilteredOut(pBAMRecs1(9)))
-    assert(!isFilteredOut(pBAMRecs1(10)))
-    assert(!isFilteredOut(pBAMRecs1(11)))
-    assert(!isFilteredOut(pBAMRecs1(12)))
-    assert(!isFilteredOut(pBAMRecs1(13)))
+    val isFilteredOut = makeFilterOutFunction(intervals, pBamFile1, bloomSize = bloomSize, bloomFp = bloomFp)
+    assert(!isFilteredOut(pBamRecs1(0)))
+    assert(!isFilteredOut(pBamRecs1(1)))
+    assert(isFilteredOut(pBamRecs1(2)))
+    assert(isFilteredOut(pBamRecs1(3)))
+    assert(isFilteredOut(pBamRecs1(4)))
+    assert(isFilteredOut(pBamRecs1(5)))
+    assert(isFilteredOut(pBamRecs1(6)))
+    assert(isFilteredOut(pBamRecs1(7)))
+    assert(!isFilteredOut(pBamRecs1(8)))
+    assert(!isFilteredOut(pBamRecs1(9)))
+    assert(!isFilteredOut(pBamRecs1(10)))
+    assert(!isFilteredOut(pBamRecs1(11)))
+    assert(!isFilteredOut(pBamRecs1(12)))
+    assert(!isFilteredOut(pBamRecs1(13)))
   }
 
-  @Test def testPairBAMPartialExonOverlap() = {
+  @Test def testPairBamPartialExonOverlap() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 891, 1000)
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, pBAMFile1, bloomSize = bloomSize, bloomFp = bloomFp)
-    assert(!isFilteredOut(pBAMRecs1(0)))
-    assert(!isFilteredOut(pBAMRecs1(1)))
-    assert(!isFilteredOut(pBAMRecs1(2)))
-    assert(!isFilteredOut(pBAMRecs1(3)))
-    assert(!isFilteredOut(pBAMRecs1(4)))
-    assert(!isFilteredOut(pBAMRecs1(5)))
-    assert(!isFilteredOut(pBAMRecs1(6)))
-    assert(!isFilteredOut(pBAMRecs1(7)))
-    assert(!isFilteredOut(pBAMRecs1(8)))
-    assert(!isFilteredOut(pBAMRecs1(9)))
-    assert(isFilteredOut(pBAMRecs1(10)))
-    assert(isFilteredOut(pBAMRecs1(11)))
-    assert(!isFilteredOut(pBAMRecs1(12)))
-    assert(!isFilteredOut(pBAMRecs1(13)))
+    val isFilteredOut = makeFilterOutFunction(intervals, pBamFile1, bloomSize = bloomSize, bloomFp = bloomFp)
+    assert(!isFilteredOut(pBamRecs1(0)))
+    assert(!isFilteredOut(pBamRecs1(1)))
+    assert(!isFilteredOut(pBamRecs1(2)))
+    assert(!isFilteredOut(pBamRecs1(3)))
+    assert(!isFilteredOut(pBamRecs1(4)))
+    assert(!isFilteredOut(pBamRecs1(5)))
+    assert(!isFilteredOut(pBamRecs1(6)))
+    assert(!isFilteredOut(pBamRecs1(7)))
+    assert(!isFilteredOut(pBamRecs1(8)))
+    assert(!isFilteredOut(pBamRecs1(9)))
+    assert(isFilteredOut(pBamRecs1(10)))
+    assert(isFilteredOut(pBamRecs1(11)))
+    assert(!isFilteredOut(pBamRecs1(12)))
+    assert(!isFilteredOut(pBamRecs1(13)))
   }
 
-  @Test def testPairBAMFilterOutMultiNotSet() = {
+  @Test def testPairBamFilterOutMultiNotSet() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 291, 320), // overlaps r01, second hit,
       new BasicFeature("chrQ", 451, 480), // overlaps r04
       new BasicFeature("chrQ", 991, 1000) // overlaps nothing; lies in the spliced region of r05
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, pBAMFile1, bloomSize = bloomSize, bloomFp = bloomFp,
+    val isFilteredOut = makeFilterOutFunction(intervals, pBamFile1, bloomSize = bloomSize, bloomFp = bloomFp,
       filterOutMulti = false)
-    assert(!isFilteredOut(pBAMRecs1(0)))
-    assert(!isFilteredOut(pBAMRecs1(1)))
-    assert(!isFilteredOut(pBAMRecs1(2)))
-    assert(!isFilteredOut(pBAMRecs1(3)))
-    assert(isFilteredOut(pBAMRecs1(4)))
-    assert(isFilteredOut(pBAMRecs1(5)))
-    assert(isFilteredOut(pBAMRecs1(6)))
-    assert(isFilteredOut(pBAMRecs1(7)))
-    assert(!isFilteredOut(pBAMRecs1(8)))
-    assert(!isFilteredOut(pBAMRecs1(9)))
-    assert(!isFilteredOut(pBAMRecs1(10)))
-    assert(!isFilteredOut(pBAMRecs1(11)))
-    assert(!isFilteredOut(pBAMRecs1(12)))
-    assert(!isFilteredOut(pBAMRecs1(13)))
+    assert(!isFilteredOut(pBamRecs1(0)))
+    assert(!isFilteredOut(pBamRecs1(1)))
+    assert(!isFilteredOut(pBamRecs1(2)))
+    assert(!isFilteredOut(pBamRecs1(3)))
+    assert(isFilteredOut(pBamRecs1(4)))
+    assert(isFilteredOut(pBamRecs1(5)))
+    assert(isFilteredOut(pBamRecs1(6)))
+    assert(isFilteredOut(pBamRecs1(7)))
+    assert(!isFilteredOut(pBamRecs1(8)))
+    assert(!isFilteredOut(pBamRecs1(9)))
+    assert(!isFilteredOut(pBamRecs1(10)))
+    assert(!isFilteredOut(pBamRecs1(11)))
+    assert(!isFilteredOut(pBamRecs1(12)))
+    assert(!isFilteredOut(pBamRecs1(13)))
   }
 
-  @Test def testPairBAMFilterMinMapQ() = {
+  @Test def testPairBamFilterMinMapQ() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 291, 320),
       new BasicFeature("chrQ", 451, 480)
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, pBAMFile2, bloomSize = bloomSize, bloomFp = bloomFp,
+    val isFilteredOut = makeFilterOutFunction(intervals, pBamFile2, bloomSize = bloomSize, bloomFp = bloomFp,
       minMapQ = 60)
     // r01 is not in since it is below the MAPQ threshold
-    assert(!isFilteredOut(pBAMRecs2(0)))
-    assert(!isFilteredOut(pBAMRecs2(1)))
-    assert(!isFilteredOut(pBAMRecs2(2)))
-    assert(!isFilteredOut(pBAMRecs2(3)))
-    assert(!isFilteredOut(pBAMRecs2(4)))
-    assert(!isFilteredOut(pBAMRecs2(5)))
-    assert(isFilteredOut(pBAMRecs2(6)))
-    assert(isFilteredOut(pBAMRecs2(7)))
-    assert(!isFilteredOut(pBAMRecs2(8)))
-    assert(!isFilteredOut(pBAMRecs2(9)))
+    assert(!isFilteredOut(pBamRecs2(0)))
+    assert(!isFilteredOut(pBamRecs2(1)))
+    assert(!isFilteredOut(pBamRecs2(2)))
+    assert(!isFilteredOut(pBamRecs2(3)))
+    assert(!isFilteredOut(pBamRecs2(4)))
+    assert(!isFilteredOut(pBamRecs2(5)))
+    assert(isFilteredOut(pBamRecs2(6)))
+    assert(isFilteredOut(pBamRecs2(7)))
+    assert(!isFilteredOut(pBamRecs2(8)))
+    assert(!isFilteredOut(pBamRecs2(9)))
   }
 
-  @Test def testPairBAMFilterReadGroupIDs() = {
+  @Test def testPairBamFilterReadGroupIds() = {
     val intervals: Iterator[Feature] = Iterator(
       new BasicFeature("chrQ", 291, 320),
       new BasicFeature("chrQ", 451, 480)
     )
-    val isFilteredOut = makeFilterOutFunction(intervals, pBAMFile2, bloomSize = bloomSize, bloomFp = bloomFp,
-      readGroupIDs = Set("002", "003"))
+    val isFilteredOut = makeFilterOutFunction(intervals, pBamFile2, bloomSize = bloomSize, bloomFp = bloomFp,
+      readGroupIds = Set("002", "003"))
     // only r01 is in the set since it is RG 002
-    assert(!isFilteredOut(pBAMRecs2(0)))
-    assert(!isFilteredOut(pBAMRecs2(1)))
-    assert(isFilteredOut(pBAMRecs2(2)))
-    assert(isFilteredOut(pBAMRecs2(3)))
-    assert(isFilteredOut(pBAMRecs2(4)))
-    assert(isFilteredOut(pBAMRecs2(5)))
-    assert(!isFilteredOut(pBAMRecs2(6)))
-    assert(!isFilteredOut(pBAMRecs2(7)))
-    assert(!isFilteredOut(pBAMRecs2(8)))
-    assert(!isFilteredOut(pBAMRecs2(9)))
+    assert(!isFilteredOut(pBamRecs2(0)))
+    assert(!isFilteredOut(pBamRecs2(1)))
+    assert(isFilteredOut(pBamRecs2(2)))
+    assert(isFilteredOut(pBamRecs2(3)))
+    assert(isFilteredOut(pBamRecs2(4)))
+    assert(isFilteredOut(pBamRecs2(5)))
+    assert(!isFilteredOut(pBamRecs2(6)))
+    assert(!isFilteredOut(pBamRecs2(7)))
+    assert(!isFilteredOut(pBamRecs2(8)))
+    assert(!isFilteredOut(pBamRecs2(9)))
   }
 
-  @Test def testWriteSingleBAMDefault() = {
+  @Test def testWriteSingleBamDefault() = {
     val mockFilterOutFunc = (r: SAMRecord) => Set("r03", "r04", "r05").contains(r.getReadName)
-    val outBAM = makeTempBAM()
-    val outBAMIndex = makeTempBAMIndex(outBAM)
-    outBAM.deleteOnExit()
-    outBAMIndex.deleteOnExit()
+    val outBam = makeTempBam()
+    val outBamIndex = makeTempBamIndex(outBam)
+    outBam.deleteOnExit()
+    outBamIndex.deleteOnExit()
 
     val stdout = new java.io.ByteArrayOutputStream
     Console.withOut(stdout) {
-      writeFilteredBAM(mockFilterOutFunc, sBAMFile1, outBAM)
+      writeFilteredBam(mockFilterOutFunc, sBamFile1, outBam)
     }
-    stdout.toString should === (
+    stdout.toString should ===(
       "input_bam\toutput_bam\tcount_included\tcount_excluded\n%s\t%s\t%d\t%d\n"
-        .format(sBAMFile1.getName, outBAM.getName, 4, 3)
+        .format(sBamFile1.getName, outBam.getName, 4, 3)
     )
 
-    val exp = new SAMFileReader(sBAMFile3).asScala
-    val obs = new SAMFileReader(outBAM).asScala
+    val exp = new SAMFileReader(sBamFile3).asScala
+    val obs = new SAMFileReader(outBam).asScala
     for ((e, o) <- exp.zip(obs))
-      e.getSAMString should === (o.getSAMString)
-    outBAM should be ('exists)
-    outBAMIndex should be ('exists)
+      e.getSAMString should ===(o.getSAMString)
+    outBam should be('exists)
+    outBamIndex should be('exists)
   }
 
-  @Test def testWriteSingleBAMAndFilteredBAM() = {
+  @Test def testWriteSingleBamAndFilteredBam() = {
     val mockFilterOutFunc = (r: SAMRecord) => Set("r03", "r04", "r05").contains(r.getReadName)
-    val outBAM = makeTempBAM()
-    val outBAMIndex = makeTempBAMIndex(outBAM)
-    outBAM.deleteOnExit()
-    outBAMIndex.deleteOnExit()
-    val filteredOutBAM = makeTempBAM()
-    val filteredOutBAMIndex = makeTempBAMIndex(filteredOutBAM)
-    filteredOutBAM.deleteOnExit()
-    filteredOutBAMIndex.deleteOnExit()
+    val outBam = makeTempBam()
+    val outBamIndex = makeTempBamIndex(outBam)
+    outBam.deleteOnExit()
+    outBamIndex.deleteOnExit()
+    val filteredOutBam = makeTempBam()
+    val filteredOutBamIndex = makeTempBamIndex(filteredOutBam)
+    filteredOutBam.deleteOnExit()
+    filteredOutBamIndex.deleteOnExit()
 
     val stdout = new java.io.ByteArrayOutputStream
     Console.withOut(stdout) {
-      writeFilteredBAM(mockFilterOutFunc, sBAMFile1, outBAM, filteredOutBAM = filteredOutBAM)
+      writeFilteredBam(mockFilterOutFunc, sBamFile1, outBam, filteredOutBam = filteredOutBam)
     }
-    stdout.toString should === (
+    stdout.toString should ===(
       "input_bam\toutput_bam\tcount_included\tcount_excluded\n%s\t%s\t%d\t%d\n"
-        .format(sBAMFile1.getName, outBAM.getName, 4, 3)
+        .format(sBamFile1.getName, outBam.getName, 4, 3)
     )
 
-    val exp = new SAMFileReader(sBAMFile4).asScala
-    val obs = new SAMFileReader(filteredOutBAM).asScala
+    val exp = new SAMFileReader(sBamFile4).asScala
+    val obs = new SAMFileReader(filteredOutBam).asScala
     for ((e, o) <- exp.zip(obs))
-      e.getSAMString should === (o.getSAMString)
-    outBAM should be ('exists)
-    outBAMIndex should be ('exists)
-    filteredOutBAM should be ('exists)
-    filteredOutBAMIndex should be ('exists)
+      e.getSAMString should ===(o.getSAMString)
+    outBam should be('exists)
+    outBamIndex should be('exists)
+    filteredOutBam should be('exists)
+    filteredOutBamIndex should be('exists)
   }
 
-  @Test def testWritePairBAMDefault() = {
+  @Test def testWritePairBamDefault() = {
     val mockFilterOutFunc = (r: SAMRecord) => Set("r03", "r04", "r05").contains(r.getReadName)
-    val outBAM = makeTempBAM()
-    val outBAMIndex = makeTempBAMIndex(outBAM)
-    outBAM.deleteOnExit()
-    outBAMIndex.deleteOnExit()
+    val outBam = makeTempBam()
+    val outBamIndex = makeTempBamIndex(outBam)
+    outBam.deleteOnExit()
+    outBamIndex.deleteOnExit()
 
     val stdout = new java.io.ByteArrayOutputStream
     Console.withOut(stdout) {
-      writeFilteredBAM(mockFilterOutFunc, pBAMFile1, outBAM)
+      writeFilteredBam(mockFilterOutFunc, pBamFile1, outBam)
     }
-    stdout.toString should === (
+    stdout.toString should ===(
       "input_bam\toutput_bam\tcount_included\tcount_excluded\n%s\t%s\t%d\t%d\n"
-        .format(pBAMFile1.getName, outBAM.getName, 8, 6)
+        .format(pBamFile1.getName, outBam.getName, 8, 6)
     )
-    val exp = new SAMFileReader(pBAMFile3).asScala
-    val obs = new SAMFileReader(outBAM).asScala
+    val exp = new SAMFileReader(pBamFile3).asScala
+    val obs = new SAMFileReader(outBam).asScala
     for ((e, o) <- exp.zip(obs))
-      e.getSAMString should === (o.getSAMString)
-    outBAM should be ('exists)
-    outBAMIndex should be ('exists)
+      e.getSAMString should ===(o.getSAMString)
+    outBam should be('exists)
+    outBamIndex should be('exists)
   }
 }
\ No newline at end of file