diff --git a/public/biopet-core/src/main/scala/nl/lumc/sasc/biopet/core/extensions/PythonCommandLineFunction.scala b/public/biopet-core/src/main/scala/nl/lumc/sasc/biopet/core/extensions/PythonCommandLineFunction.scala
index 69131561efc0acb33fd0acb62e47ea8f094d4df8..e4c0b269568dbabbea6231edece9d76b5dc38dc2 100644
--- a/public/biopet-core/src/main/scala/nl/lumc/sasc/biopet/core/extensions/PythonCommandLineFunction.scala
+++ b/public/biopet-core/src/main/scala/nl/lumc/sasc/biopet/core/extensions/PythonCommandLineFunction.scala
@@ -22,22 +22,22 @@ import org.broadinstitute.gatk.utils.commandline.Input
 
 trait PythonCommandLineFunction extends BiopetCommandLineFunction {
   @Input(doc = "Python script", required = false)
-  var python_script: File = _
+  var pythonScript: File = _
 
   executable = config("exe", default = "python", submodule = "python", freeVar = false)
 
-  protected var python_script_name: String = _
+  protected var pythonScriptName: String = _
 
   /**
    * checks if script already exist in jar otherwise try to fetch from the jar
    * @param script name / location of script
    */
   def setPythonScript(script: String) {
-    python_script = new File(script)
-    if (!python_script.exists()) {
+    pythonScript = new File(script)
+    if (!pythonScript.exists()) {
       setPythonScript(script, "")
     } else {
-      python_script_name = script
+      pythonScriptName = script
     }
   }
 
@@ -47,17 +47,17 @@ trait PythonCommandLineFunction extends BiopetCommandLineFunction {
    * @param subpackage location of script in jar
    */
   def setPythonScript(script: String, subpackage: String) {
-    python_script_name = script
-    python_script = new File(".queue/tmp/" + subpackage + python_script_name)
-    if (!python_script.getParentFile.exists) python_script.getParentFile.mkdirs
-    val is = getClass.getResourceAsStream(subpackage + python_script_name)
-    val os = new FileOutputStream(python_script)
+    pythonScriptName = script
+    pythonScript = new File(".queue/tmp/" + subpackage + pythonScriptName)
+    if (!pythonScript.getParentFile.exists) pythonScript.getParentFile.mkdirs
+    val is = getClass.getResourceAsStream(subpackage + pythonScriptName)
+    val os = new FileOutputStream(pythonScript)
     org.apache.commons.io.IOUtils.copy(is, os)
     os.close()
   }
 
   /** return basic command to prefix the complete command with */
   def getPythonCommand: String = {
-    required(executable) + required(python_script)
+    required(executable) + required(pythonScript)
   }
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cufflinks.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cufflinks.scala
index 9177b7a6b8a7c6828c574d3d88b2d4610137e998..4eb907f3c56744bba48e4f8e5d6b3551877f38fe 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cufflinks.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cufflinks.scala
@@ -44,29 +44,29 @@ class Cufflinks(val root: Configurable) extends BiopetCommandLineFunction with V
 
   @Output(doc = "Output GTF file")
   lazy val outputGtf: File = {
-    require(input != null && output_dir != null,
+    require(input != null && outputDir != null,
       "Can not set Cufflinks GTF output while input file and/or output directory is not defined")
     // cufflinks always outputs a transcripts.gtf file in the output directory
-    new File(output_dir, "transcripts.gtf")
+    new File(outputDir, "transcripts.gtf")
   }
 
   @Output(doc = "Output isoform FPKM file")
   lazy val outputIsoformsFpkm: File = {
-    require(input != null && output_dir != null,
+    require(input != null && outputDir != null,
       "Can not set Cufflinks isoforms.fpkm_tracking output while input file and/or output directory is not defined")
-    new File(output_dir, "isoforms.fpkm_tracking")
+    new File(outputDir, "isoforms.fpkm_tracking")
   }
 
   @Output(doc = "Output GTF file")
   lazy val outputGenesFpkm: File = {
-    require(input != null && output_dir != null,
+    require(input != null && outputDir != null,
       "Can not set Cufflinks genes.fpkm_tracking output while input file and/or output directory is not defined")
     // cufflinks always outputs a genes.fpkm_tracking file in the output directory
-    new File(output_dir, "genes.fpkm_tracking")
+    new File(outputDir, "genes.fpkm_tracking")
   }
 
   /** write all output files to this directory [./] */
-  var output_dir: File = config("output_dir", default = new File("."))
+  var outputDir: File = config("output_dir", default = new File("."))
 
   /** value of random number generator seed [0] */
   var seed: Option[Int] = config("seed")
@@ -75,106 +75,106 @@ class Cufflinks(val root: Configurable) extends BiopetCommandLineFunction with V
   var GTF: Option[File] = config("GTF")
 
   /** use reference transcript annotation to guide assembly */
-  var GTF_guide: Option[File] = config("GTF_guide")
+  var gtfGuide: Option[File] = config("GTF_guide")
 
   /** ignore all alignment within transcripts in this file */
-  var mask_file: Option[File] = config("mask_file")
+  var maskFile: Option[File] = config("mask_file")
 
   /** use bias correction - reference fasta required [NULL] */
-  var frag_bias_correct: Option[String] = config("frag_bias_correct")
+  var fragBiasCorrect: Option[String] = config("frag_bias_correct")
 
   /** use 'rescue method' for multi-reads (more accurate) [FALSE] */
-  var multi_read_correct: Boolean = config("multi_read_correct", default = false)
+  var multiReadCorrect: Boolean = config("multi_read_correct", default = false)
 
   /** library prep used for input reads [below] */
-  var library_type: Option[String] = config("library_type")
+  var libraryType: Option[String] = config("library_type")
 
   /** Method used to normalize library sizes [below] */
-  var library_norm_method: Option[String] = config("library_norm_method")
+  var libraryNormMethod: Option[String] = config("library_norm_method")
 
   /** average fragment length (unpaired reads only) [200] */
-  var frag_len_mean: Option[Int] = config("frag_len_mean")
+  var fragLenMean: Option[Int] = config("frag_len_mean")
 
   /** fragment length std deviation (unpaired reads only) [80] */
-  var frag_len_std_dev: Option[Int] = config("frag_len_std_dev")
+  var fragLenStdDev: Option[Int] = config("frag_len_std_dev")
 
   /** maximum iterations allowed for MLE calculation [5000] */
-  var max_mle_iterations: Option[Int] = config("max_mle_iterations")
+  var maxMleIterations: Option[Int] = config("max_mle_iterations")
 
   /** count hits compatible with reference RNAs only [FALSE] */
-  var compatible_hits_norm: Boolean = config("compatible_hits_norm", default = false)
+  var compatibleHitsNorm: Boolean = config("compatible_hits_norm", default = false)
 
   /** count all hits for normalization [TRUE] */
-  var total_hits_norm: Boolean = config("total_hits_norm", default = true)
+  var totalHitsNorm: Boolean = config("total_hits_norm", default = true)
 
   /** Number of fragment generation samples [100] */
-  var num_frag_count_draws: Option[Int] = config("num_frag_count_draws")
+  var numFragCountDraws: Option[Int] = config("num_frag_count_draws")
 
   /** Number of fragment assignment samples per generation [50] */
-  var num_frag_assign_draws: Option[Int] = config("num_frag_assign_draws")
+  var numFragAssignDraws: Option[Int] = config("num_frag_assign_draws")
 
   /** Maximum number of alignments allowed per fragment [unlim] */
-  var max_frag_multihits: Option[Int] = config("max_frag_multihits")
+  var maxFragMultihits: Option[Int] = config("max_frag_multihits")
 
   /** No effective length correction [FALSE] */
-  var no_effective_length_correction: Boolean = config("no_effective_length_correction", default = false)
+  var noEffectiveLengthCorrection: Boolean = config("no_effective_length_correction", default = false)
 
   /** No length correction [FALSE] */
-  var no_length_correction: Boolean = config("no_length_correction", default = false)
+  var noLengthCorrection: Boolean = config("no_length_correction", default = false)
 
   /** assembled transcripts have this ID prefix [CUFF] */
   var label: Option[String] = config("label")
 
   /** suppress transcripts below this abundance level [0.10] */
-  var min_isoform_fraction: Option[Float] = config("min_isoform_fraction")
+  var minIsoformFraction: Option[Float] = config("min_isoform_fraction")
 
   /** suppress intra-intronic transcripts below this level [0.15] */
-  var pre_mrna_fraction: Option[Float] = config("pre_mrna_fraction")
+  var preMrnaFraction: Option[Float] = config("pre_mrna_fraction")
 
   /** ignore alignments with gaps longer than this [300000] */
-  var max_intron_length: Option[Int] = config("max_intron_length")
+  var maxIntronLength: Option[Int] = config("max_intron_length")
 
   /** alpha for junction binomial test filter [0.001] */
-  var junc_alpha: Option[Float] = config("junc_alpha")
+  var juncAlpha: Option[Float] = config("junc_alpha")
 
   /** percent read overhang taken as 'suspiciously small' [0.09] */
-  var small_anchor_fraction: Option[Float] = config("small_anchor_fraction")
+  var smallAnchorFraction: Option[Float] = config("small_anchor_fraction")
 
   /** minimum number of fragments needed for new transfrags [10] */
-  var min_frags_per_transfrag: Option[Int] = config("min_frags_per_transfrag")
+  var minFragsPerTransfrag: Option[Int] = config("min_frags_per_transfrag")
 
   /** number of terminal exon bp to tolerate in introns [8] */
-  var overhang_tolerance: Option[Int] = config("overhang_tolerance")
+  var overhangTolerance: Option[Int] = config("overhang_tolerance")
 
   /** maximum genomic length allowed for a given bundle [3500000] */
-  var max_bundle_length: Option[Int] = config("max_bundle_length")
+  var maxBundleLength: Option[Int] = config("max_bundle_length")
 
   /** maximum fragments allowed in a bundle before skipping [500000] */
-  var max_bundle_frags: Option[Int] = config("max_bundle_frags")
+  var maxBundleFrags: Option[Int] = config("max_bundle_frags")
 
   /** minimum intron size allowed in genome [50] */
-  var min_intron_length: Option[Int] = config("min_intron_length")
+  var minIntronLength: Option[Int] = config("min_intron_length")
 
   /** minimum avg coverage required to attempt 3' trimming [10] */
-  var trim_3_avgcov_thresh: Option[Int] = config("trim_3_avgcov_thresh")
+  var trim3AvgCovThresh: Option[Int] = config("trim_3_avgcov_thresh")
 
   /** fraction of avg coverage below which to trim 3' end [0.1] */
-  var trim_3_dropoff_frac: Option[Float] = config("trim_3_dropoff_frac")
+  var trim3DropOffFrac: Option[Float] = config("trim_3_dropoff_frac")
 
   /** maximum fraction of allowed multireads per transcript [0.75] */
-  var max_multiread_fraction: Option[Float] = config("max_multiread_fraction")
+  var maxMultireadFraction: Option[Float] = config("max_multiread_fraction")
 
   /** maximum gap size to fill between transfrags (in bp) [50] */
-  var overlap_radius: Option[Int] = config("overlap_radius")
+  var overlapRadius: Option[Int] = config("overlap_radius")
 
   /** disable tiling by faux reads [FALSE] */
-  var no_faux_reads: Boolean = config("no_faux_reads", default = false)
+  var noFauxReads: Boolean = config("no_faux_reads", default = false)
 
   /** overhang allowed on 3' end when merging with reference [600] */
-  var flag_3_overhang_tolerance: Option[Int] = config("flag_3_overhang_tolerance")
+  var flag3OverhangTolerance: Option[Int] = config("flag_3_overhang_tolerance")
 
   /** overhang allowed inside reference intron when merging [30] */
-  var intron_overhang_tolerance: Option[Int] = config("intron_overhang_tolerance")
+  var intronOverhangTolerance: Option[Int] = config("intron_overhang_tolerance")
 
   /** log-friendly verbose processing (no progress bar) [FALSE] */
   var verbose: Boolean = config("verbose", default = false)
@@ -183,7 +183,7 @@ class Cufflinks(val root: Configurable) extends BiopetCommandLineFunction with V
   var quiet: Boolean = config("quiet", default = false)
 
   /** do not contact server to check for update availability [FALSE] */
-  var no_update_check: Boolean = config("no_update_check", default = false)
+  var noUpdateCheck: Boolean = config("no_update_check", default = false)
 
   def versionRegex = """cufflinks v(.*)""".r
   def versionCommand = executable
@@ -191,46 +191,46 @@ class Cufflinks(val root: Configurable) extends BiopetCommandLineFunction with V
 
   def cmdLine =
     required(executable) +
-      required("--output-dir", output_dir) +
+      required("--output-dir", outputDir) +
       optional("--num-threads", threads) +
       optional("--seed", seed) +
       optional("--GTF", GTF) +
-      optional("--GTF-guide", GTF_guide) +
-      optional("--mask-file", mask_file) +
-      optional("--frag-bias-correct", frag_bias_correct) +
-      conditional(multi_read_correct, "--multi-read-correct") +
-      optional("--library-type", library_type) +
-      optional("--library-norm-method", library_norm_method) +
-      optional("--frag-len-mean", frag_len_mean) +
-      optional("--frag-len-std-dev", frag_len_std_dev) +
-      optional("--max-mle-iterations", max_mle_iterations) +
-      conditional(compatible_hits_norm, "--compatible-hits-norm") +
-      conditional(total_hits_norm, "--total-hits-norm") +
-      optional("--num-frag-count-draws", num_frag_count_draws) +
-      optional("--num-frag-assign-draws", num_frag_assign_draws) +
-      optional("--max-frag-multihits", max_frag_multihits) +
-      conditional(no_effective_length_correction, "--no-effective-length-correction") +
-      conditional(no_length_correction, "--no-length-correction") +
+      optional("--GTF-guide", gtfGuide) +
+      optional("--mask-file", maskFile) +
+      optional("--frag-bias-correct", fragBiasCorrect) +
+      conditional(multiReadCorrect, "--multi-read-correct") +
+      optional("--library-type", libraryType) +
+      optional("--library-norm-method", libraryNormMethod) +
+      optional("--frag-len-mean", fragLenMean) +
+      optional("--frag-len-std-dev", fragLenStdDev) +
+      optional("--max-mle-iterations", maxMleIterations) +
+      conditional(compatibleHitsNorm, "--compatible-hits-norm") +
+      conditional(totalHitsNorm, "--total-hits-norm") +
+      optional("--num-frag-count-draws", numFragCountDraws) +
+      optional("--num-frag-assign-draws", numFragAssignDraws) +
+      optional("--max-frag-multihits", maxFragMultihits) +
+      conditional(noEffectiveLengthCorrection, "--no-effective-length-correction") +
+      conditional(noLengthCorrection, "--no-length-correction") +
       optional("--label", label) +
-      optional("--min-isoform-fraction", min_isoform_fraction) +
-      optional("--pre-mrna-fraction", pre_mrna_fraction) +
-      optional("--max-intron-length", max_intron_length) +
-      optional("--junc-alpha", junc_alpha) +
-      optional("--small-anchor-fraction", small_anchor_fraction) +
-      optional("--min-frags-per-transfrag", min_frags_per_transfrag) +
-      optional("--overhang-tolerance", overhang_tolerance) +
-      optional("--max-bundle-length", max_bundle_length) +
-      optional("--max-bundle-frags", max_bundle_frags) +
-      optional("--min-intron-length", min_intron_length) +
-      optional("--trim-3-avgcov-thresh", trim_3_avgcov_thresh) +
-      optional("--trim-3-dropoff-frac", trim_3_dropoff_frac) +
-      optional("--max-multiread-fraction", max_multiread_fraction) +
-      optional("--overlap-radius", overlap_radius) +
-      conditional(no_faux_reads, "--no-faux-reads") +
-      optional("--flag-3-overhang-tolerance", flag_3_overhang_tolerance) +
-      optional("--intron-overhang-tolerance", intron_overhang_tolerance) +
+      optional("--min-isoform-fraction", minIsoformFraction) +
+      optional("--pre-mrna-fraction", preMrnaFraction) +
+      optional("--max-intron-length", maxIntronLength) +
+      optional("--junc-alpha", juncAlpha) +
+      optional("--small-anchor-fraction", smallAnchorFraction) +
+      optional("--min-frags-per-transfrag", minFragsPerTransfrag) +
+      optional("--overhang-tolerance", overhangTolerance) +
+      optional("--max-bundle-length", maxBundleLength) +
+      optional("--max-bundle-frags", maxBundleFrags) +
+      optional("--min-intron-length", minIntronLength) +
+      optional("--trim-3-avgcov-thresh", trim3AvgCovThresh) +
+      optional("--trim-3-dropoff-frac", trim3DropOffFrac) +
+      optional("--max-multiread-fraction", maxMultireadFraction) +
+      optional("--overlap-radius", overlapRadius) +
+      conditional(noFauxReads, "--no-faux-reads") +
+      optional("--flag-3-overhang-tolerance", flag3OverhangTolerance) +
+      optional("--intron-overhang-tolerance", intronOverhangTolerance) +
       conditional(verbose, "--verbose") +
       conditional(quiet, "--quiet") +
-      conditional(no_update_check, "--no-update-check") +
+      conditional(noUpdateCheck, "--no-update-check") +
       required(input)
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cuffquant.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cuffquant.scala
index d7ead689a92b19dbb38dccee1eb8b96312736923..6ac11bc43f9ff0250669d1a9fbcd1a6e04c9bc3f 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cuffquant.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cuffquant.scala
@@ -40,46 +40,46 @@ class Cuffquant(val root: Configurable) extends BiopetCommandLineFunction with V
 
   /** input GTF file */
   @Input(doc = "Input GTF file", required = true)
-  var transcripts_gtf: File = null
+  var transcriptsGtf: File = null
 
   /** output file, computed automatically from output directory */
   @Output(doc = "Output CXB file")
   lazy val outputCxb: File = {
-    require(output_dir != null,
+    require(outputDir != null,
       "Can not set Cuffquant CXB output while input file and/or output directory is not defined")
     // cufflinks always outputs a transcripts.gtf file in the output directory
-    new File(output_dir, "abundances.cxb")
+    new File(outputDir, "abundances.cxb")
   }
 
   /** write all output files to this directory [./] */
-  var output_dir: File = config("output_dir", default = new File("."))
+  var outputDir: File = config("output_dir", default = new File("."))
 
   /** ignore all alignment within transcripts in this file */
-  var mask_file: Option[File] = config("mask_file")
+  var maskFile: Option[File] = config("mask_file")
 
   /** use bias correction - reference fasta required [NULL] */
-  var frag_bias_correct: Option[String] = config("frag_bias_correct")
+  var fragBiasCorrect: Option[String] = config("frag_bias_correct")
 
   /** use 'rescue method' for multi-reads (more accurate) [FALSE] */
-  var multi_read_correct: Boolean = config("multi_read_correct", default = false)
+  var multiReadCorrect: Boolean = config("multi_read_correct", default = false)
 
   /** number of threads used during analysis [1] */
-  var num_threads: Option[Int] = config("num_threads")
+  var numThreads: Option[Int] = config("num_threads")
 
   /** library prep used for input reads [below] */
-  var library_type: Option[String] = config("library_type")
+  var libraryType: Option[String] = config("library_type")
 
   /** average fragment length (unpaired reads only) [200] */
-  var frag_len_mean: Option[Int] = config("frag_len_mean")
+  var fragLenMean: Option[Int] = config("frag_len_mean")
 
   /** fragment length std deviation (unpaired reads only) [80] */
-  var frag_len_std_dev: Option[Int] = config("frag_len_std_dev")
+  var fragLenStdDev: Option[Int] = config("frag_len_std_dev")
 
   /** minimum number of alignments in a locus for testing [10] */
-  var min_alignment_count: Option[Int] = config("min_alignment_count")
+  var minAlignmentCount: Option[Int] = config("min_alignment_count")
 
   /** maximum iterations allowed for MLE calculation [5000] */
-  var max_mle_iterations: Option[Int] = config("max_mle_iterations")
+  var maxMleIterations: Option[Int] = config("max_mle_iterations")
 
   /** log-friendly verbose processing (no progress bar) [FALSE] */
   var verbose: Boolean = config("verbose", default = false)
@@ -91,31 +91,31 @@ class Cuffquant(val root: Configurable) extends BiopetCommandLineFunction with V
   var seed: Option[Int] = config("seed")
 
   /** do not contact server to check for update availability [FALSE] */
-  var no_update_check: Boolean = config("no_update_check", default = false)
+  var noUpdateCheck: Boolean = config("no_update_check", default = false)
 
   /** maximum fragments allowed in a bundle before skipping [500000] */
-  var max_bundle_frags: Option[Int] = config("max_bundle_frags")
+  var maxBundleFrags: Option[Int] = config("max_bundle_frags")
 
   /** Maximum number of alignments allowed per fragment [unlim] */
-  var max_frag_multihits: Option[Int] = config("max_frag_multihits")
+  var maxFragMultihits: Option[Int] = config("max_frag_multihits")
 
   /** No effective length correction [FALSE] */
-  var no_effective_length_correction: Boolean = config("no_effective_length_correction", default = false)
+  var noEffectiveLengthCorrection: Boolean = config("no_effective_length_correction", default = false)
 
   /** No length correction [FALSE] */
-  var no_length_correction: Boolean = config("no_length_correction", default = false)
+  var noLengthCorrection: Boolean = config("no_length_correction", default = false)
 
   /** Skip a random subset of reads this size [0.0] */
-  var read_skip_fraction: Option[Double] = config("read_skip_fraction")
+  var readSkipFraction: Option[Double] = config("read_skip_fraction")
 
   /** Break all read pairs [FALSE] */
-  var no_read_pairs: Boolean = config("no_read_pairs", default = false)
+  var noReadPairs: Boolean = config("no_read_pairs", default = false)
 
   /** Trim reads to be this long (keep 5' end) [none] */
-  var trim_read_length: Option[Int] = config("trim_read_length")
+  var trimReadLength: Option[Int] = config("trim_read_length")
 
   /** Disable SCV correction */
-  var no_scv_correction: Boolean = config("no_scv_correction", default = false)
+  var noScvCorrection: Boolean = config("no_scv_correction", default = false)
 
   def versionRegex = """cuffquant v(.*)""".r
   def versionCommand = executable
@@ -123,28 +123,28 @@ class Cuffquant(val root: Configurable) extends BiopetCommandLineFunction with V
 
   def cmdLine =
     required(executable) +
-      required("--output-dir", output_dir) +
-      optional("--mask-file", mask_file) +
-      optional("--frag-bias-correct", frag_bias_correct) +
-      conditional(multi_read_correct, "--multi-read-correct") +
-      optional("--num-threads", num_threads) +
-      optional("--library-type", library_type) +
-      optional("--frag-len-mean", frag_len_mean) +
-      optional("--frag-len-std-dev", frag_len_std_dev) +
-      optional("--min-alignment-count", min_alignment_count) +
-      optional("--max-mle-iterations", max_mle_iterations) +
+      required("--output-dir", outputDir) +
+      optional("--mask-file", maskFile) +
+      optional("--frag-bias-correct", fragBiasCorrect) +
+      conditional(multiReadCorrect, "--multi-read-correct") +
+      optional("--num-threads", numThreads) +
+      optional("--library-type", libraryType) +
+      optional("--frag-len-mean", fragLenMean) +
+      optional("--frag-len-std-dev", fragLenStdDev) +
+      optional("--min-alignment-count", minAlignmentCount) +
+      optional("--max-mle-iterations", maxMleIterations) +
       conditional(verbose, "--verbose") +
       conditional(quiet, "--quiet") +
       optional("--seed", seed) +
-      conditional(no_update_check, "--no-update-check") +
-      optional("--max-bundle-frags", max_bundle_frags) +
-      optional("--max-frag-multihits", max_frag_multihits) +
-      conditional(no_effective_length_correction, "--no-effective-length-correction") +
-      conditional(no_length_correction, "--no-length-correction") +
-      optional("--read-skip-fraction", read_skip_fraction) +
-      conditional(no_read_pairs, "--no-read-pairs") +
-      optional("--trim-read-length", trim_read_length) +
-      conditional(no_scv_correction, "--no-scv-correction") +
-      required(transcripts_gtf) +
+      conditional(noUpdateCheck, "--no-update-check") +
+      optional("--max-bundle-frags", maxBundleFrags) +
+      optional("--max-frag-multihits", maxFragMultihits) +
+      conditional(noEffectiveLengthCorrection, "--no-effective-length-correction") +
+      conditional(noLengthCorrection, "--no-length-correction") +
+      optional("--read-skip-fraction", readSkipFraction) +
+      conditional(noReadPairs, "--no-read-pairs") +
+      optional("--trim-read-length", trimReadLength) +
+      conditional(noScvCorrection, "--no-scv-correction") +
+      required(transcriptsGtf) +
       required(input.map(_.mkString(";").mkString(" ")))
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cutadapt.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cutadapt.scala
index 777cfb53fc9e56bd7e9f6742bd547c0a03dc4a49..60c25a5a69820b72a7e5bbd0f17cc8b5f0dac3fe 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cutadapt.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Cutadapt.scala
@@ -192,7 +192,7 @@ class Cutadapt(val root: Configurable) extends BiopetCommandLineFunction with Su
       "bpoutput" -> 0,
       "toomanyn" -> 0
     )
-    val adapter_stats: mutable.Map[String, Long] = mutable.Map()
+    val adapterStats: mutable.Map[String, Long] = mutable.Map()
 
     if (statsOutput.exists) {
       val statsFile = Source.fromFile(statsOutput)
@@ -206,7 +206,7 @@ class Cutadapt(val root: Configurable) extends BiopetCommandLineFunction with Su
           case tooManyN(m)              => stats("toomanyn") = m.replaceAll(",", "").toLong
           case basePairsProcessed(m)    => stats("bpinput") = m.replaceAll(",", "").toLong
           case basePairsWritten(m)      => stats("bpoutput") = m.replaceAll(",", "").toLong
-          case adapterR(adapter, count) => adapter_stats += (adapter -> count.toLong)
+          case adapterR(adapter, count) => adapterStats += (adapter -> count.toLong)
           case _                        =>
         }
       }
@@ -224,7 +224,7 @@ class Cutadapt(val root: Configurable) extends BiopetCommandLineFunction with Su
       "num_reads_discarded_many_n" -> stats("toomanyn"),
       "num_bases_input" -> stats("bpinput"),
       "num_based_output" -> stats("bpoutput"),
-      adaptersStatsName -> adapter_stats.toMap
+      adaptersStatsName -> adapterStats.toMap
     )
   }
 
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Fastqc.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Fastqc.scala
index 8906bf134cfef63df8aa86620eea51ab9e918893..ee375163166ca858393bb95ee9f3bc7850896f84 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Fastqc.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Fastqc.scala
@@ -40,7 +40,7 @@ class Fastqc(val root: Configurable) extends BiopetCommandLineFunction with Vers
   var output: File = null
 
   executable = config("exe", default = "fastqc")
-  var java_exe: String = config("exe", default = "java", submodule = "java", freeVar = false)
+  var javaExe: String = config("exe", default = "java", submodule = "java", freeVar = false)
   var kmers: Option[Int] = config("kmers")
   var quiet: Boolean = config("quiet", default = false)
   var noextract: Boolean = config("noextract", default = false)
@@ -85,7 +85,7 @@ class Fastqc(val root: Configurable) extends BiopetCommandLineFunction with Vers
 
   /** return commandline to execute */
   def cmdLine = required(executable) +
-    optional("--java", java_exe) +
+    optional("--java", javaExe) +
     optional("--threads", threads) +
     optional("--contaminants", contaminants) +
     optional("--adapters", adapters) +
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Sickle.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Sickle.scala
index f6571f22065809adacdee46f345997b344897018..9ed1b053822259090a0793045339de270af72f03 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Sickle.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Sickle.scala
@@ -31,22 +31,22 @@ import scala.io.Source
  */
 class Sickle(val root: Configurable) extends BiopetCommandLineFunction with Summarizable with Version {
   @Input(doc = "R1 input")
-  var input_R1: File = _
+  var inputR1: File = _
 
   @Input(doc = "R2 input", required = false)
-  var input_R2: File = _
+  var inputR2: File = _
 
   @Output(doc = "R1 output", required = false)
-  var output_R1: File = _
+  var outputR1: File = _
 
   @Output(doc = "R2 output", required = false)
-  var output_R2: File = _
+  var outputR2: File = _
 
   @Output(doc = "singles output", required = false)
-  var output_singles: File = _
+  var outputSingles: File = _
 
   @Output(doc = "stats output")
-  var output_stats: File = _
+  var outputStats: File = _
 
   executable = config("exe", default = "sickle", freeVar = false)
   var qualityType: Option[String] = config("qualitytype")
@@ -67,22 +67,22 @@ class Sickle(val root: Configurable) extends BiopetCommandLineFunction with Summ
   /** Return command to execute */
   def cmdLine = {
     var cmd: String = required(executable)
-    if (input_R2 != null) {
+    if (inputR2 != null) {
       cmd += required("pe") +
-        required("-r", input_R2) +
-        required("-p", output_R2) +
-        required("-s", output_singles)
+        required("-r", inputR2) +
+        required("-p", outputR2) +
+        required("-s", outputSingles)
     } else cmd += required("se")
     cmd +
-      (if (inputAsStdin) required("-f", new File("/dev/stdin")) else required("-f", input_R1)) +
+      (if (inputAsStdin) required("-f", new File("/dev/stdin")) else required("-f", inputR1)) +
       required("-t", qualityType) +
-      (if (outputAsStsout) required("-o", new File("/dev/stdout")) else required("-o", output_R1)) +
+      (if (outputAsStsout) required("-o", new File("/dev/stdout")) else required("-o", outputR1)) +
       optional("-q", qualityThreshold) +
       optional("-l", lengthThreshold) +
       conditional(noFiveprime, "-x") +
       conditional(discardN, "-n") +
       conditional(quiet || outputAsStsout, "--quiet") +
-      (if (outputAsStsout) "" else " > " + required(output_stats))
+      (if (outputAsStsout) "" else " > " + required(outputStats))
   }
 
   /** returns stats map for summary */
@@ -98,7 +98,7 @@ class Sickle(val root: Configurable) extends BiopetCommandLineFunction with Summ
 
     var stats: mutable.Map[String, Int] = mutable.Map()
 
-    if (output_stats.exists) for (line <- Source.fromFile(output_stats).getLines()) {
+    if (outputStats.exists) for (line <- Source.fromFile(outputStats).getLines()) {
       line match {
         // single run
         case sKept(num)              => stats += ("num_reads_kept" -> num.toInt)
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Star.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Star.scala
index 916e2b4c9d8465426fe693512ae739d2af393979..913c95b7b2a8a40e51682b891816e167128f7e21 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Star.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Star.scala
@@ -146,24 +146,24 @@ object Star {
              outputDir: File,
              isIntermediate: Boolean = false,
              deps: List[File] = Nil): (File, List[Star]) = {
-    val starCommand_pass1 = Star(configurable, R1, R2, new File(outputDir, "aln-pass1"))
-    starCommand_pass1.isIntermediate = isIntermediate
-    starCommand_pass1.deps = deps
-    starCommand_pass1.beforeGraph()
-
-    val starCommand_reindex = new Star(configurable)
-    starCommand_reindex.sjdbFileChrStartEnd = starCommand_pass1.outputTab
-    starCommand_reindex.outputDir = new File(outputDir, "re-index")
-    starCommand_reindex.runmode = "genomeGenerate"
-    starCommand_reindex.isIntermediate = isIntermediate
-    starCommand_reindex.beforeGraph()
-
-    val starCommand_pass2 = Star(configurable, R1, R2, new File(outputDir, "aln-pass2"))
-    starCommand_pass2.genomeDir = starCommand_reindex.outputDir
-    starCommand_pass2.isIntermediate = isIntermediate
-    starCommand_pass2.deps = deps
-    starCommand_pass2.beforeGraph()
-
-    (starCommand_pass2.outputSam, List(starCommand_pass1, starCommand_reindex, starCommand_pass2))
+    val starCommandPass1 = Star(configurable, R1, R2, new File(outputDir, "aln-pass1"))
+    starCommandPass1.isIntermediate = isIntermediate
+    starCommandPass1.deps = deps
+    starCommandPass1.beforeGraph()
+
+    val starCommandReindex = new Star(configurable)
+    starCommandReindex.sjdbFileChrStartEnd = starCommandPass1.outputTab
+    starCommandReindex.outputDir = new File(outputDir, "re-index")
+    starCommandReindex.runmode = "genomeGenerate"
+    starCommandReindex.isIntermediate = isIntermediate
+    starCommandReindex.beforeGraph()
+
+    val starCommandPass2 = Star(configurable, R1, R2, new File(outputDir, "aln-pass2"))
+    starCommandPass2.genomeDir = starCommandReindex.outputDir
+    starCommandPass2.isIntermediate = isIntermediate
+    starCommandPass2.deps = deps
+    starCommandPass2.beforeGraph()
+
+    (starCommandPass2.outputSam, List(starCommandPass1, starCommandReindex, starCommandPass2))
   }
 }
\ No newline at end of file
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Tophat.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Tophat.scala
index 495049da762dee0509e4f8ecb4e208ac0c3dfe3a..83a2b386fc5e9de8568e51804b3d2c1ad51ea8e3 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Tophat.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/Tophat.scala
@@ -42,330 +42,330 @@ class Tophat(val root: Configurable) extends BiopetCommandLineFunction with Refe
   var R2: List[File] = List.empty[File]
 
   private def checkInputsOk(): Unit =
-    require(R1.nonEmpty && output_dir != null, "Read 1 input(s) are defined and output directory is defined")
+    require(R1.nonEmpty && outputDir != null, "Read 1 input(s) are defined and output directory is defined")
 
   /** output files, computed automatically from output directory */
 
   @Output(doc = "Output SAM/BAM file")
   lazy val outputAcceptedHits: File = {
     checkInputsOk()
-    new File(output_dir, if (no_convert_bam) "accepted_hits.sam" else "accepted_hits.bam")
+    new File(outputDir, if (noConvertBam) "accepted_hits.sam" else "accepted_hits.bam")
   }
 
   @Output(doc = "Unmapped SAM/BAM file")
   lazy val outputUnmapped: File = {
     checkInputsOk()
-    new File(output_dir, if (no_convert_bam) "unmapped.sam" else "unmapped.bam")
+    new File(outputDir, if (noConvertBam) "unmapped.sam" else "unmapped.bam")
   }
 
   @Output(doc = "Deletions BED file")
   lazy val outputDeletions: File = {
     checkInputsOk()
-    new File(output_dir, "deletions.bed")
+    new File(outputDir, "deletions.bed")
   }
 
   @Output(doc = "Insertions BED file")
   lazy val outputInsertions: File = {
     checkInputsOk()
-    new File(output_dir, "insertions.bed")
+    new File(outputDir, "insertions.bed")
   }
 
   @Output(doc = "Junctions BED file")
   lazy val outputJunctions: File = {
     checkInputsOk()
-    new File(output_dir, "junctions.bed")
+    new File(outputDir, "junctions.bed")
   }
 
   @Output(doc = "Alignment summary file")
   lazy val outputAlignSummary: File = {
     checkInputsOk()
-    new File(output_dir, "align_summary.txt")
+    new File(outputDir, "align_summary.txt")
   }
 
   @Argument(doc = "Bowtie index", shortName = "bti", required = true)
-  var bowtie_index: String = config("bowtie_index")
+  var bowtieIndex: String = config("bowtie_index")
 
   /** write all output files to this directory [./] */
-  var output_dir: File = config("output_dir", default = new File("tophat_out"))
+  var outputDir: File = config("output_dir", default = new File("tophat_out"))
 
   var bowtie1: Boolean = config("bowtie1", default = false)
 
-  var read_mismatches: Option[Int] = config("read_mismatches")
+  var readMismatches: Option[Int] = config("read_mismatches")
 
-  var read_gap_length: Option[Int] = config("read_gap_length")
+  var readGapLength: Option[Int] = config("read_gap_length")
 
-  var read_edit_dist: Option[Int] = config("read_edit_dist")
+  var readEditDist: Option[Int] = config("read_edit_dist")
 
-  var read_realign_edit_dist: Option[Int] = config("read_realign_edit_dist")
+  var readRealignEditDist: Option[Int] = config("read_realign_edit_dist")
 
-  var min_anchor: Option[Int] = config("min_anchor")
+  var minAnchor: Option[Int] = config("min_anchor")
 
-  var splice_mismatches: Option[String] = config("splice_mismatches")
+  var spliceMismatches: Option[String] = config("splice_mismatches")
 
-  var min_intron_length: Option[Int] = config("min_intron_length")
+  var minIntronLength: Option[Int] = config("min_intron_length")
 
-  var max_intron_length: Option[Int] = config("max_intron_length")
+  var maxIntronLength: Option[Int] = config("max_intron_length")
 
-  var max_multihits: Option[Int] = config("max_multihits")
+  var maxMultihits: Option[Int] = config("max_multihits")
 
-  var suppress_hits: Boolean = config("suppress_hits", default = false)
+  var suppressHits: Boolean = config("suppress_hits", default = false)
 
-  var transcriptome_max_hits: Option[Int] = config("transcriptome_max_hits")
+  var transcriptomeMaxHits: Option[Int] = config("transcriptome_max_hits")
 
-  var prefilter_multihits: Boolean = config("prefilter_multihits", default = false)
+  var preFilterMultihits: Boolean = config("prefilter_multihits", default = false)
 
-  var max_insertion_length: Option[Int] = config("max_insertion_length")
+  var maxInsertionLength: Option[Int] = config("max_insertion_length")
 
-  var max_deletion_length: Option[Int] = config("max_deletion_length")
+  var maxDeletionLength: Option[Int] = config("max_deletion_length")
 
-  var solexa_quals: Boolean = config("solexa_quals", default = false)
+  var solexaQuals: Boolean = config("solexa_quals", default = false)
 
-  var solexa1_3_quals: Boolean = config("solexa1.3_quals", default = false)
+  var solexa13Quals: Boolean = config("solexa1.3_quals", default = false)
 
-  var phred64_quals: Boolean = config("phred64_quals", default = false)
+  var phred64Quals: Boolean = config("phred64_quals", default = false)
 
   var quals: Boolean = config("quals", default = false)
 
-  var integer_quals: Boolean = config("integer_quals", default = false)
+  var integerQuals: Boolean = config("integer_quals", default = false)
 
   var color: Boolean = config("color", default = false)
 
-  var color_out: Boolean = config("color_out", default = false)
+  var colorOut: Boolean = config("color_out", default = false)
 
-  var library_type: Option[String] = config("library_type")
+  var libraryType: Option[String] = config("library_type")
 
   var resume: Option[String] = config("resume")
 
   var GTF: Option[String] = config("GTF")
 
-  var transcriptome_index: Option[String] = config("transcriptome_index")
+  var transcriptomeIndex: Option[String] = config("transcriptome_index")
 
-  var transcriptome_only: Boolean = config("transcriptome_only", default = false)
+  var transcriptomeOnly: Boolean = config("transcriptome_only", default = false)
 
-  var raw_juncs: Option[String] = config("raw_juncs")
+  var rawJuncs: Option[String] = config("raw_juncs")
 
   var insertions: Option[String] = config("insertions")
 
   var deletions: Option[String] = config("deletions")
 
-  var mate_inner_dist: Option[Int] = config("mate_inner_dist")
+  var mateInnerDist: Option[Int] = config("mate_inner_dist")
 
-  var mate_std_dev: Option[Int] = config("mate_std_dev")
+  var mateStdDev: Option[Int] = config("mate_std_dev")
 
-  var no_novel_juncs: Boolean = config("no_novel_juncs", default = false)
+  var noNovelJuncs: Boolean = config("no_novel_juncs", default = false)
 
-  var no_novel_indels: Boolean = config("no_novel_indels", default = false)
+  var noNovelIndels: Boolean = config("no_novel_indels", default = false)
 
-  var no_gtf_juncs: Boolean = config("no_gtf_juncs", default = false)
+  var noGtfJuncs: Boolean = config("no_gtf_juncs", default = false)
 
-  var no_coverage_search: Boolean = config("no_coverage_search", default = false)
+  var noCoverageSearch: Boolean = config("no_coverage_search", default = false)
 
-  var coverage_search: Boolean = config("coverage_search", default = false)
+  var coverageSearch: Boolean = config("coverage_search", default = false)
 
-  var microexon_search: Boolean = config("microexon_search", default = false)
+  var microexonSearch: Boolean = config("microexon_search", default = false)
 
-  var keep_tmp: Boolean = config("keep_tmp", default = false)
+  var keepTmp: Boolean = config("keep_tmp", default = false)
 
-  var tmp_dir: Option[String] = config("tmp_dir")
+  var tmpDir: Option[String] = config("tmp_dir")
 
   var zpacker: Option[String] = config("zpacker")
 
-  var unmapped_fifo: Boolean = config("unmapped_fifo", default = false)
+  var unmappedFifo: Boolean = config("unmapped_fifo", default = false)
 
-  var report_secondary_alignments: Boolean = config("report_secondary_alignments", default = false)
+  var reportSecondaryAlignments: Boolean = config("report_secondary_alignments", default = false)
 
-  var no_discordant: Boolean = config("no_discordant", default = false)
+  var noDiscordant: Boolean = config("no_discordant", default = false)
 
-  var no_mixed: Boolean = config("no_mixed", default = false)
+  var noMixed: Boolean = config("no_mixed", default = false)
 
-  var segment_mismatches: Option[Int] = config("segment_mismatches")
+  var segmentMismatches: Option[Int] = config("segment_mismatches")
 
-  var segment_length: Option[Int] = config("segment_length")
+  var segmentLength: Option[Int] = config("segment_length")
 
-  var bowtie_n: Boolean = config("bowtie_n", default = false)
+  var bowtieN: Boolean = config("bowtie_n", default = false)
 
-  var min_coverage_intron: Option[Int] = config("min_coverage_intron")
+  var minCoverageIntron: Option[Int] = config("min_coverage_intron")
 
-  var max_coverage_intron: Option[Int] = config("max_coverage_intron")
+  var maxCoverageIntron: Option[Int] = config("max_coverage_intron")
 
-  var min_segment_intron: Option[Int] = config("min_segment_intron")
+  var minSegmentIntron: Option[Int] = config("min_segment_intron")
 
-  var max_segment_intron: Option[Int] = config("max_segment_intron")
+  var maxSegmentIntron: Option[Int] = config("max_segment_intron")
 
-  var no_sort_bam: Boolean = config("no_sort_bam", default = false)
+  var noSortBam: Boolean = config("no_sort_bam", default = false)
 
-  var no_convert_bam: Boolean = config("no_convert_bam", default = false)
+  var noConvertBam: Boolean = config("no_convert_bam", default = false)
 
-  var keep_fasta_order: Boolean = config("keep_fasta_order", default = false)
+  var keepFastaOrder: Boolean = config("keep_fasta_order", default = false)
 
-  var allow_partial_mapping: Boolean = config("allow_partial_mapping", default = false)
+  var allowPartialMapping: Boolean = config("allow_partial_mapping", default = false)
 
-  var b2_very_fast: Boolean = config("b2_very_fast", default = false)
+  var b2VeryFast: Boolean = config("b2_very_fast", default = false)
 
-  var b2_fast: Boolean = config("b2_fast", default = false)
+  var b2Fast: Boolean = config("b2_fast", default = false)
 
-  var b2_sensitive: Boolean = config("b2_sensitive", default = false)
+  var b2Sensitive: Boolean = config("b2_sensitive", default = false)
 
-  var b2_very_sensitive: Boolean = config("b2_very_sensitive", default = false)
+  var b2VerySensitive: Boolean = config("b2_very_sensitive", default = false)
 
-  var b2_N: Option[Int] = config("b2_N")
+  var b2N: Option[Int] = config("b2_N")
 
-  var b2_L: Option[Int] = config("b2_L")
+  var b2L: Option[Int] = config("b2_L")
 
-  var b2_i: Option[String] = config("b2_i")
+  var b2I: Option[String] = config("b2_i")
 
-  var b2_n_ceil: Option[String] = config("b2_n_ceil")
+  var b2NCeil: Option[String] = config("b2_n_ceil")
 
-  var b2_gbar: Option[Int] = config("b2_gbar")
+  var b2Gbar: Option[Int] = config("b2_gbar")
 
-  var b2_mp: Option[String] = config("b2_mp")
+  var b2Mp: Option[String] = config("b2_mp")
 
-  var b2_np: Option[Int] = config("b2_np")
+  var b2Np: Option[Int] = config("b2_np")
 
-  var b2_rdg: Option[String] = config("b2_rdg")
+  var b2Rdg: Option[String] = config("b2_rdg")
 
-  var b2_rfg: Option[String] = config("b2_rfg")
+  var b2Rfg: Option[String] = config("b2_rfg")
 
-  var b2_score_min: Option[String] = config("b2_score_min")
+  var b2ScoreMin: Option[String] = config("b2_score_min")
 
-  var b2_D: Option[Int] = config("b2_D")
+  var b2D: Option[Int] = config("b2_D")
 
-  var b2_R: Option[Int] = config("b2_R")
+  var b2R: Option[Int] = config("b2_R")
 
-  var fusion_search: Boolean = config("fusion_search", default = false)
+  var fusionSearch: Boolean = config("fusion_search", default = false)
 
-  var fusion_anchor_length: Option[Int] = config("fusion_anchor_length")
+  var fusionAnchorLength: Option[Int] = config("fusion_anchor_length")
 
-  var fusion_min_dist: Option[Int] = config("fusion_min_dist")
+  var fusionMinDist: Option[Int] = config("fusion_min_dist")
 
-  var fusion_read_mismatches: Option[Int] = config("fusion_read_mismatches")
+  var fusionReadMismatches: Option[Int] = config("fusion_read_mismatches")
 
-  var fusion_multireads: Option[Int] = config("fusion_multireads")
+  var fusionMultireads: Option[Int] = config("fusion_multireads")
 
-  var fusion_multipairs: Option[Int] = config("fusion_multipairs")
+  var fusionMultipairs: Option[Int] = config("fusion_multipairs")
 
-  var fusion_ignore_chromosomes: Option[String] = config("fusion_ignore_chromosomes")
+  var fusionIgnoreChromosomes: Option[String] = config("fusion_ignore_chromosomes")
 
-  var fusion_do_not_resolve_conflicts: Boolean = config("fusion_do_not_resolve_conflicts", default = false)
+  var fusionDoNotResolveConflicts: Boolean = config("fusion_do_not_resolve_conflicts", default = false)
 
-  var rg_id: Option[String] = config("rg_id")
+  var rgId: Option[String] = config("rg_id")
 
-  var rg_sample: Option[String] = config("rg_sample")
+  var rgSample: Option[String] = config("rg_sample")
 
-  var rg_library: Option[String] = config("rg_library")
+  var rgLibrary: Option[String] = config("rg_library")
 
-  var rg_description: Option[String] = config("rg_description")
+  var rgDescription: Option[String] = config("rg_description")
 
-  var rg_platform_unit: Option[String] = config("rg_platform_unit")
+  var rgPlatformUnit: Option[String] = config("rg_platform_unit")
 
-  var rg_center: Option[String] = config("rg_center")
+  var rgCenter: Option[String] = config("rg_center")
 
-  var rg_date: Option[String] = config("rg_date")
+  var rgDate: Option[String] = config("rg_date")
 
-  var rg_platform: Option[String] = config("rg_platform")
+  var rgPlatform: Option[String] = config("rg_platform")
 
   override def beforeGraph: Unit = {
     super.beforeGraph
-    if (bowtie1 && !new File(bowtie_index).getParentFile.list().toList
-      .filter(_.startsWith(new File(bowtie_index).getName)).exists(_.endsWith(".ebwt")))
+    if (bowtie1 && !new File(bowtieIndex).getParentFile.list().toList
+      .filter(_.startsWith(new File(bowtieIndex).getName)).exists(_.endsWith(".ebwt")))
       throw new IllegalArgumentException("No bowtie1 index found for tophat")
-    else if (!new File(bowtie_index).getParentFile.list().toList
-      .filter(_.startsWith(new File(bowtie_index).getName)).exists(_.endsWith(".bt2")))
+    else if (!new File(bowtieIndex).getParentFile.list().toList
+      .filter(_.startsWith(new File(bowtieIndex).getName)).exists(_.endsWith(".bt2")))
       throw new IllegalArgumentException("No bowtie2 index found for tophat")
   }
 
   def cmdLine: String = required(executable) +
-    optional("-o", output_dir) +
+    optional("-o", outputDir) +
     conditional(bowtie1, "--bowtie1") +
-    optional("--read-mismatches", read_mismatches) +
-    optional("--read-gap-length", read_gap_length) +
-    optional("--read-edit-dist", read_edit_dist) +
-    optional("--read-realign-edit-dist", read_realign_edit_dist) +
-    optional("--min-anchor", min_anchor) +
-    optional("--splice-mismatches", splice_mismatches) +
-    optional("--min-intron-length", min_intron_length) +
-    optional("--max-intron-length", max_intron_length) +
-    optional("--max-multihits", max_multihits) +
-    conditional(suppress_hits, "--suppress-hits") +
-    optional("--transcriptome-max-hits", transcriptome_max_hits) +
-    conditional(prefilter_multihits, "--prefilter-multihits") +
-    optional("--max-insertion-length", max_insertion_length) +
-    optional("--max-deletion-length", max_deletion_length) +
-    conditional(solexa_quals, "--solexa-quals") +
-    conditional(solexa1_3_quals, "--solexa1.3-quals") +
-    conditional(phred64_quals, "--phred64-quals") +
+    optional("--read-mismatches", readMismatches) +
+    optional("--read-gap-length", readGapLength) +
+    optional("--read-edit-dist", readEditDist) +
+    optional("--read-realign-edit-dist", readRealignEditDist) +
+    optional("--min-anchor", minAnchor) +
+    optional("--splice-mismatches", spliceMismatches) +
+    optional("--min-intron-length", minIntronLength) +
+    optional("--max-intron-length", maxIntronLength) +
+    optional("--max-multihits", maxMultihits) +
+    conditional(suppressHits, "--suppress-hits") +
+    optional("--transcriptome-max-hits", transcriptomeMaxHits) +
+    conditional(preFilterMultihits, "--prefilter-multihits") +
+    optional("--max-insertion-length", maxInsertionLength) +
+    optional("--max-deletion-length", maxDeletionLength) +
+    conditional(solexaQuals, "--solexa-quals") +
+    conditional(solexa13Quals, "--solexa1.3-quals") +
+    conditional(phred64Quals, "--phred64-quals") +
     conditional(quals, "--quals") +
-    conditional(integer_quals, "--integer-quals") +
+    conditional(integerQuals, "--integer-quals") +
     conditional(color, "--color") +
-    conditional(color_out, "--color-out") +
-    optional("--library-type", library_type) +
+    conditional(colorOut, "--color-out") +
+    optional("--library-type", libraryType) +
     optional("--num-threads", threads) +
     optional("--resume", resume) +
     optional("--GTF", GTF) +
-    optional("--transcriptome-index", transcriptome_index) +
-    conditional(transcriptome_only, "--transcriptome-only") +
-    optional("--raw-juncs", raw_juncs) +
+    optional("--transcriptome-index", transcriptomeIndex) +
+    conditional(transcriptomeOnly, "--transcriptome-only") +
+    optional("--raw-juncs", rawJuncs) +
     optional("--insertions", insertions) +
     optional("--deletions", deletions) +
-    optional("--mate-inner-dist", mate_inner_dist) +
-    optional("--mate-std-dev", mate_std_dev) +
-    conditional(no_novel_juncs, "--no-novel-juncs") +
-    conditional(no_novel_indels, "--no-novel-indels") +
-    conditional(no_gtf_juncs, "--no-gtf-juncs") +
-    conditional(no_coverage_search, "--no-coverage-search") +
-    conditional(coverage_search, "--coverage-search") +
-    conditional(microexon_search, "--microexon-search") +
-    conditional(keep_tmp, "--keep-tmp") +
-    optional("--tmp-dir", tmp_dir) +
+    optional("--mate-inner-dist", mateInnerDist) +
+    optional("--mate-std-dev", mateStdDev) +
+    conditional(noNovelJuncs, "--no-novel-juncs") +
+    conditional(noNovelIndels, "--no-novel-indels") +
+    conditional(noGtfJuncs, "--no-gtf-juncs") +
+    conditional(noCoverageSearch, "--no-coverage-search") +
+    conditional(coverageSearch, "--coverage-search") +
+    conditional(microexonSearch, "--microexon-search") +
+    conditional(keepTmp, "--keep-tmp") +
+    optional("--tmp-dir", tmpDir) +
     optional("--zpacker", zpacker) +
-    conditional(unmapped_fifo, "--unmapped-fifo") +
-    conditional(report_secondary_alignments, "--report-secondary-alignments") +
-    conditional(no_discordant, "--no-discordant") +
-    conditional(no_mixed, "--no-mixed") +
-    optional("--segment-mismatches", segment_mismatches) +
-    optional("--segment-length", segment_length) +
-    conditional(bowtie_n, "--bowtie-n") +
-    optional("--min-coverage-intron", min_coverage_intron) +
-    optional("--max-coverage-intron", max_coverage_intron) +
-    optional("--min-segment-intron", min_segment_intron) +
-    optional("--max-segment-intron", max_segment_intron) +
-    conditional(no_sort_bam, "--no-sort-bam") +
-    conditional(no_convert_bam, "--no-convert-bam") +
-    conditional(keep_fasta_order, "--keep-fasta-order") +
-    conditional(allow_partial_mapping, "--allow-partial-mapping") +
-    conditional(b2_very_fast, "--b2-very-fast") +
-    conditional(b2_fast, "--b2-fast") +
-    conditional(b2_sensitive, "--b2-sensitive") +
-    conditional(b2_very_sensitive, "--b2-very-sensitive") +
-    optional("--b2-N", b2_N) +
-    optional("--b2-L", b2_L) +
-    optional("--b2-i", b2_i) +
-    optional("--b2-n-ceil", b2_n_ceil) +
-    optional("--b2-gbar", b2_gbar) +
-    optional("--b2-mp", b2_mp) +
-    optional("--b2-np", b2_np) +
-    optional("--b2-rdg", b2_rdg) +
-    optional("--b2-rfg", b2_rfg) +
-    optional("--b2-score-min", b2_score_min) +
-    optional("--b2-D", b2_D) +
-    optional("--b2-R", b2_R) +
-    conditional(fusion_search, "--fusion-search") +
-    optional("--fusion-anchor-length", fusion_anchor_length) +
-    optional("--fusion-min-dist", fusion_min_dist) +
-    optional("--fusion-read-mismatches", fusion_read_mismatches) +
-    optional("--fusion-multireads", fusion_multireads) +
-    optional("--fusion-multipairs", fusion_multipairs) +
-    optional("--fusion-ignore-chromosomes", fusion_ignore_chromosomes) +
-    conditional(fusion_do_not_resolve_conflicts, "--fusion-do-not-resolve-conflicts") +
-    optional("--rg-id", rg_id) +
-    optional("--rg-sample", rg_sample) +
-    optional("--rg-library", rg_library) +
-    optional("--rg-description", rg_description) +
-    optional("--rg-platform-unit", rg_platform_unit) +
-    optional("--rg-center", rg_center) +
-    optional("--rg-date", rg_date) +
-    optional("--rg-platform", rg_platform) +
-    required(bowtie_index) +
+    conditional(unmappedFifo, "--unmapped-fifo") +
+    conditional(reportSecondaryAlignments, "--report-secondary-alignments") +
+    conditional(noDiscordant, "--no-discordant") +
+    conditional(noMixed, "--no-mixed") +
+    optional("--segment-mismatches", segmentMismatches) +
+    optional("--segment-length", segmentLength) +
+    conditional(bowtieN, "--bowtie-n") +
+    optional("--min-coverage-intron", minCoverageIntron) +
+    optional("--max-coverage-intron", maxCoverageIntron) +
+    optional("--min-segment-intron", minSegmentIntron) +
+    optional("--max-segment-intron", maxSegmentIntron) +
+    conditional(noSortBam, "--no-sort-bam") +
+    conditional(noConvertBam, "--no-convert-bam") +
+    conditional(keepFastaOrder, "--keep-fasta-order") +
+    conditional(allowPartialMapping, "--allow-partial-mapping") +
+    conditional(b2VeryFast, "--b2-very-fast") +
+    conditional(b2Fast, "--b2-fast") +
+    conditional(b2Sensitive, "--b2-sensitive") +
+    conditional(b2VerySensitive, "--b2-very-sensitive") +
+    optional("--b2-N", b2N) +
+    optional("--b2-L", b2L) +
+    optional("--b2-i", b2I) +
+    optional("--b2-n-ceil", b2NCeil) +
+    optional("--b2-gbar", b2Gbar) +
+    optional("--b2-mp", b2Mp) +
+    optional("--b2-np", b2Np) +
+    optional("--b2-rdg", b2Rdg) +
+    optional("--b2-rfg", b2Rfg) +
+    optional("--b2-score-min", b2ScoreMin) +
+    optional("--b2-D", b2D) +
+    optional("--b2-R", b2R) +
+    conditional(fusionSearch, "--fusion-search") +
+    optional("--fusion-anchor-length", fusionAnchorLength) +
+    optional("--fusion-min-dist", fusionMinDist) +
+    optional("--fusion-read-mismatches", fusionReadMismatches) +
+    optional("--fusion-multireads", fusionMultireads) +
+    optional("--fusion-multipairs", fusionMultipairs) +
+    optional("--fusion-ignore-chromosomes", fusionIgnoreChromosomes) +
+    conditional(fusionDoNotResolveConflicts, "--fusion-do-not-resolve-conflicts") +
+    optional("--rg-id", rgId) +
+    optional("--rg-sample", rgSample) +
+    optional("--rg-library", rgLibrary) +
+    optional("--rg-description", rgDescription) +
+    optional("--rg-platform-unit", rgPlatformUnit) +
+    optional("--rg-center", rgCenter) +
+    optional("--rg-date", rgDate) +
+    optional("--rg-platform", rgPlatform) +
+    required(bowtieIndex) +
     required(R1.mkString(",")) +
     optional(R2.mkString(","))
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/VariantEffectPredictor.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/VariantEffectPredictor.scala
index 97b4c74134a59ce94fea03ce0e843a5b6ee3adbc..c5c0a0446926c8a0da19ecbaea4755662c79087f 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/VariantEffectPredictor.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/VariantEffectPredictor.scala
@@ -48,21 +48,21 @@ class VariantEffectPredictor(val root: Configurable) extends BiopetCommandLineFu
   var v: Boolean = config("v", default = true, freeVar = false)
   var q: Boolean = config("q", default = false, freeVar = false)
   var offline: Boolean = config("offline", default = false)
-  var no_progress: Boolean = config("no_progress", default = false)
+  var noProgress: Boolean = config("no_progress", default = false)
   var everything: Boolean = config("everything", default = false)
   var force: Boolean = config("force", default = false)
-  var no_stats: Boolean = config("no_stats", default = false)
-  var stats_text: Boolean = config("stats_text", default = true)
+  var noStats: Boolean = config("no_stats", default = false)
+  var statsText: Boolean = config("stats_text", default = true)
   var html: Boolean = config("html", default = false)
   var cache: Boolean = config("cache", default = false)
   var humdiv: Boolean = config("humdiv", default = false)
   var regulatory: Boolean = config("regulatory", default = false)
-  var cell_type: Boolean = config("cell_type", default = false)
+  var cellType: Boolean = config("cell_type", default = false)
   var phased: Boolean = config("phased", default = false)
-  var allele_number: Boolean = config("allele_number", default = false)
+  var alleleNumber: Boolean = config("allele_number", default = false)
   var numbers: Boolean = config("numbers", default = false)
   var domains: Boolean = config("domains", default = false)
-  var no_escape: Boolean = config("no_escape", default = false)
+  var noEscape: Boolean = config("no_escape", default = false)
   var hgvs: Boolean = config("hgvs", default = false)
   var protein: Boolean = config("protein", default = false)
   var symbol: Boolean = config("symbol", default = false)
@@ -71,50 +71,50 @@ class VariantEffectPredictor(val root: Configurable) extends BiopetCommandLineFu
   var tsl: Boolean = config("tsl", default = false)
   var canonical: Boolean = config("canonical", default = false)
   var biotype: Boolean = config("biotype", default = false)
-  var xref_refseq: Boolean = config("xref_refseq", default = false)
-  var check_existing: Boolean = config("check_existing", default = false)
-  var check_alleles: Boolean = config("check_alleles", default = false)
-  var check_svs: Boolean = config("svs", default = false)
+  var xrefRefseq: Boolean = config("xref_refseq", default = false)
+  var checkExisting: Boolean = config("check_existing", default = false)
+  var checkAlleles: Boolean = config("check_alleles", default = false)
+  var checkSvs: Boolean = config("svs", default = false)
   var gmaf: Boolean = config("gmaf", default = false)
-  var maf_1kg: Boolean = config("maf_1kg", default = false)
-  var maf_esp: Boolean = config("maf_esp", default = false)
-  var old_map: Boolean = config("old_maf", default = false)
+  var maf1kg: Boolean = config("maf_1kg", default = false)
+  var mafEsp: Boolean = config("maf_esp", default = false)
+  var oldMap: Boolean = config("old_maf", default = false)
   var pubmed: Boolean = config("pubmed", default = false)
 
   var vcf: Boolean = config("vcf", default = true, freeVar = false)
   var json: Boolean = config("json", default = false, freeVar = false)
   var gvf: Boolean = config("gvf", default = false)
-  var check_ref: Boolean = config("check_ref", default = false)
-  var coding_only: Boolean = config("coding_only", default = false)
-  var no_intergenic: Boolean = config("no_intergenic", default = false)
+  var checkRef: Boolean = config("check_ref", default = false)
+  var codingOnly: Boolean = config("coding_only", default = false)
+  var noIntergenic: Boolean = config("no_intergenic", default = false)
   var pick: Boolean = config("pick", default = false)
-  var pick_allele: Boolean = config("pick_allele", default = false)
-  var flag_pick: Boolean = config("flag_pick", default = false)
-  var flag_pick_allele: Boolean = config("flag_pick_allele", default = false)
-  var per_gene: Boolean = config("per_gene", default = false)
-  var most_severe: Boolean = config("most_severe", default = false)
+  var pickAllele: Boolean = config("pick_allele", default = false)
+  var flagPick: Boolean = config("flag_pick", default = false)
+  var flagPickAllele: Boolean = config("flag_pick_allele", default = false)
+  var perGene: Boolean = config("per_gene", default = false)
+  var mostSevere: Boolean = config("most_severe", default = false)
   var summary: Boolean = config("summary", default = false)
-  var filter_common: Boolean = config("filter_common", default = false)
-  var check_frequency: Boolean = config("check_frequency", default = false)
-  var allow_non_variant: Boolean = config("allow_non_variant", default = false)
+  var filterCommon: Boolean = config("filter_common", default = false)
+  var checkFrequency: Boolean = config("check_frequency", default = false)
+  var allowNonVariant: Boolean = config("allow_non_variant", default = false)
   var database: Boolean = config("database", default = false)
   var genomes: Boolean = config("genomes", default = false)
-  var gencode_basic: Boolean = config("gencode_basic", default = false)
+  var gencodeBasic: Boolean = config("gencode_basic", default = false)
   var refseq: Boolean = config("refseq", default = false)
   var merged: Boolean = config("merged", default = false)
-  var all_refseq: Boolean = config("all_refseq", default = false)
+  var allRefseq: Boolean = config("all_refseq", default = false)
   var lrg: Boolean = config("lrg", default = false)
-  var no_whole_genome: Boolean = config("no_whole_genome", default = false)
-  var skip_db_check: Boolean = config("skip_db_check", default = false)
+  var noWholeGenome: Boolean = config("no_whole_genome", default = false)
+  var skibDbCheck: Boolean = config("skip_db_check", default = false)
 
   // Textual args
-  var vep_config: Option[String] = config("config", freeVar = false)
+  var vepConfig: Option[String] = config("config", freeVar = false)
   var species: Option[String] = config("species", freeVar = false)
   var assembly: Option[String] = config("assembly")
   var format: Option[String] = config("format")
   var dir: Option[String] = config("dir")
-  var dir_cache: Option[String] = config("dir_cache")
-  var dir_plugins: Option[String] = config("dir_plugins")
+  var dirCache: Option[String] = config("dir_cache")
+  var dirPlugins: Option[String] = config("dir_plugins")
   var fasta: Option[String] = config("fasta")
   var sift: Option[String] = config("sift")
   var polyphen: Option[String] = config("polyphen")
@@ -125,10 +125,10 @@ class VariantEffectPredictor(val root: Configurable) extends BiopetCommandLineFu
   var convert: Option[String] = config("convert")
   var terms: Option[String] = config("terms")
   var chr: Option[String] = config("chr")
-  var pick_order: Option[String] = config("pick_order")
-  var freq_pop: Option[String] = config("check_pop")
-  var freq_gt_lt: Option[String] = config("freq_gt_lt")
-  var freq_filter: Option[String] = config("freq_filter")
+  var pickOrder: Option[String] = config("pick_order")
+  var freqPop: Option[String] = config("check_pop")
+  var freqGtLt: Option[String] = config("freq_gt_lt")
+  var freqFilter: Option[String] = config("freq_filter")
   var filter: Option[String] = config("filter")
   var host: Option[String] = config("host")
   var user: Option[String] = config("user")
@@ -136,15 +136,15 @@ class VariantEffectPredictor(val root: Configurable) extends BiopetCommandLineFu
   var registry: Option[String] = config("registry")
   var build: Option[String] = config("build")
   var compress: Option[String] = config("compress")
-  var cache_region_size: Option[String] = config("cache_region_size")
+  var cacheRegionSize: Option[String] = config("cache_region_size")
 
   // Numeric args
   override def defaultThreads: Int = config("fork", default = 2)
-  var cache_version: Option[Int] = config("cache_version")
-  var freq_freq: Option[Float] = config("freq_freq")
+  var cacheVersion: Option[Int] = config("cache_version")
+  var freqFreq: Option[Float] = config("freq_freq")
   var port: Option[Int] = config("port")
-  var db_version: Option[Int] = config("db_version")
-  var buffer_size: Option[Int] = config("buffer_size")
+  var dbVersion: Option[Int] = config("db_version")
+  var bufferSize: Option[Int] = config("buffer_size")
   // ought to be a flag, but is BUG in VEP; becomes numeric ("1" is true)
   var failed: Option[Int] = config("failed")
 
@@ -165,21 +165,21 @@ class VariantEffectPredictor(val root: Configurable) extends BiopetCommandLineFu
     conditional(v, "-v") +
     conditional(q, "-q") +
     conditional(offline, "--offline") +
-    conditional(no_progress, "--no_progress") +
+    conditional(noProgress, "--no_progress") +
     conditional(everything, "--everything") +
     conditional(force, "--force_overwrite") +
-    conditional(no_stats, "--no_stats") +
-    conditional(stats_text, "--stats_text") +
+    conditional(noStats, "--no_stats") +
+    conditional(statsText, "--stats_text") +
     conditional(html, "--html") +
     conditional(cache, "--cache") +
     conditional(humdiv, "--humdiv") +
     conditional(regulatory, "--regulatory") +
-    conditional(cell_type, "--cel_type") +
+    conditional(cellType, "--cell_type") +
     conditional(phased, "--phased") +
-    conditional(allele_number, "--allele_number") +
+    conditional(alleleNumber, "--allele_number") +
     conditional(numbers, "--numbers") +
     conditional(domains, "--domains") +
-    conditional(no_escape, "--no_escape") +
+    conditional(noEscape, "--no_escape") +
     conditional(hgvs, "--hgvs") +
     conditional(protein, "--protein") +
     conditional(symbol, "--symbol") +
@@ -188,46 +188,46 @@ class VariantEffectPredictor(val root: Configurable) extends BiopetCommandLineFu
     conditional(tsl, "--tsl") +
     conditional(canonical, "--canonical") +
     conditional(biotype, "--biotype") +
-    conditional(xref_refseq, "--xref_refseq") +
-    conditional(check_existing, "--check_existing") +
-    conditional(check_alleles, "--check_alleles") +
-    conditional(check_svs, "--check_svs") +
+    conditional(xrefRefseq, "--xref_refseq") +
+    conditional(checkExisting, "--check_existing") +
+    conditional(checkAlleles, "--check_alleles") +
+    conditional(checkSvs, "--check_svs") +
     conditional(gmaf, "--gmaf") +
-    conditional(maf_1kg, "--maf_1kg") +
-    conditional(maf_esp, "--maf_esp") +
+    conditional(maf1kg, "--maf_1kg") +
+    conditional(mafEsp, "--maf_esp") +
     conditional(pubmed, "--pubmed") +
     conditional(vcf, "--vcf") +
     conditional(json, "--json") +
     conditional(gvf, "--gvf") +
-    conditional(check_ref, "--check_ref") +
-    conditional(coding_only, "--coding_only") +
-    conditional(no_intergenic, "--no_intergenic") +
+    conditional(checkRef, "--check_ref") +
+    conditional(codingOnly, "--coding_only") +
+    conditional(noIntergenic, "--no_intergenic") +
     conditional(pick, "--pick") +
-    conditional(pick_allele, "--pick_allele") +
-    conditional(flag_pick, "--flag_pick") +
-    conditional(flag_pick_allele, "--flag_pick_allele") +
-    conditional(per_gene, "--per_gene") +
-    conditional(most_severe, "--most_severe") +
+    conditional(pickAllele, "--pick_allele") +
+    conditional(flagPick, "--flag_pick") +
+    conditional(flagPickAllele, "--flag_pick_allele") +
+    conditional(perGene, "--per_gene") +
+    conditional(mostSevere, "--most_severe") +
     conditional(summary, "--summary") +
-    conditional(filter_common, "--filter_common") +
-    conditional(check_frequency, "--check_frequency") +
-    conditional(allow_non_variant, "--allow_non_variant") +
+    conditional(filterCommon, "--filter_common") +
+    conditional(checkFrequency, "--check_frequency") +
+    conditional(allowNonVariant, "--allow_non_variant") +
     conditional(database, "--database") +
     conditional(genomes, "--genomes") +
-    conditional(gencode_basic, "--gencode_basic") +
+    conditional(gencodeBasic, "--gencode_basic") +
     conditional(refseq, "--refseq") +
     conditional(merged, "--merged") +
-    conditional(all_refseq, "--all_refseq") +
+    conditional(allRefseq, "--all_refseq") +
     conditional(lrg, "--lrg") +
-    conditional(no_whole_genome, "--no_whole_genome") +
-    conditional(skip_db_check, "--skip_db_check") +
-    optional("--config", vep_config) +
+    conditional(noWholeGenome, "--no_whole_genome") +
+    conditional(skipDbCheck, "--skip_db_check") +
+    optional("--config", vepConfig) +
     optional("--species", species) +
     optional("--assembly", assembly) +
     optional("--format", format) +
     optional("--dir", dir) +
-    optional("--dir_cache", dir_cache) +
-    optional("--dir_plugins", dir_plugins) +
+    optional("--dir_cache", dirCache) +
+    optional("--dir_plugins", dirPlugins) +
     optional("--fasta", fasta) +
     optional("--sift", sift) +
     optional("--polyphen", polyphen) +
@@ -238,10 +238,10 @@ class VariantEffectPredictor(val root: Configurable) extends BiopetCommandLineFu
     optional("--convert", convert) +
     optional("--terms", terms) +
     optional("--chr", chr) +
-    optional("--pick_order", pick_order) +
-    optional("--freq_pop", freq_pop) +
-    optional("--freq_gt_lt", freq_gt_lt) +
-    optional("--freq_filter", freq_filter) +
+    optional("--pick_order", pickOrder) +
+    optional("--freq_pop", freqPop) +
+    optional("--freq_gt_lt", freqGtLt) +
+    optional("--freq_filter", freqFilter) +
     optional("--filter", filter) +
     optional("--host", host) +
     optional("--user", user) +
@@ -249,19 +249,19 @@ class VariantEffectPredictor(val root: Configurable) extends BiopetCommandLineFu
     optional("--registry", registry) +
     optional("--build", build) +
     optional("--compress", compress) +
-    optional("--cache_region_size", cache_region_size) +
+    optional("--cache_region_size", cacheRegionSize) +
     optional("--fork", threads) +
-    optional("--cache_version", cache_version) +
-    optional("--freq_freq", freq_freq) +
+    optional("--cache_version", cacheVersion) +
+    optional("--freq_freq", freqFreq) +
     optional("--port", port) +
-    optional("--db_version", db_version) +
-    optional("--buffer_size", buffer_size) +
+    optional("--db_version", dbVersion) +
+    optional("--buffer_size", bufferSize) +
     optional("--failed", failed)
 
   def summaryFiles: Map[String, File] = Map()
 
   def summaryStats: Map[String, Any] = {
-    if (stats_text) {
+    if (statsText) {
       val stats_file: File = new File(output.getAbsolutePath + "_summary.txt")
       parseStatsFile(stats_file)
     } else {
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/bowtie/Bowtie.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/bowtie/Bowtie.scala
index 429c65b985980f2874dc53ff4a97c546d4c29f13..92abaded51b2b28c5fbad52329e19df724c9ea0d 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/bowtie/Bowtie.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/bowtie/Bowtie.scala
@@ -46,7 +46,7 @@ class Bowtie(val root: Configurable) extends BiopetCommandLineFunction with Refe
   override def defaultThreads = 8
 
   var sam: Boolean = config("sam", default = false)
-  var sam_RG: Option[String] = config("sam-RG")
+  var samRg: Option[String] = config("sam-RG")
   var seedlen: Option[Int] = config("seedlen")
   var seedmms: Option[Int] = config("seedmms")
   var k: Option[Int] = config("k")
@@ -80,7 +80,7 @@ class Bowtie(val root: Configurable) extends BiopetCommandLineFunction with Refe
     conditional(largeIndex, "--large-index") +
     conditional(best, "--best") +
     conditional(strata, "--strata") +
-    optional("--sam-RG", sam_RG) +
+    optional("--sam-RG", samRg) +
     optional("--seedlen", seedlen) +
     optional("--seedmms", seedmms) +
     optional("-k", k) +
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/bowtie/Bowtie2.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/bowtie/Bowtie2.scala
index 793fd7d159a64c5f432afd575d5a04b7d4fa7c09..2ebce9331ec32c152695c1dac4adea618040b7b1 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/bowtie/Bowtie2.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/bowtie/Bowtie2.scala
@@ -45,20 +45,20 @@ class Bowtie2(val root: Configurable) extends BiopetCommandLineFunction with Ref
   var trim3: Option[Int] = config("trim3")
   var phred33: Boolean = config("phred33", default = false)
   var phred64: Boolean = config("phred64", default = false)
-  var int_quals: Boolean = config("int_quals", default = false)
+  var intQuals: Boolean = config("int_quals", default = false)
 
   /* Alignment options */
   var N: Option[Int] = config("N")
   var L: Option[Int] = config("L")
   var i: Option[String] = config("i")
-  var n_ceil: Option[String] = config("n_ceil")
+  var nCeil: Option[String] = config("n_ceil")
   var dpad: Option[Int] = config("dpad")
   var gbar: Option[Int] = config("gbar")
-  var ignore_quals: Boolean = config("ignore_quals", default = false)
+  var ignoreQuals: Boolean = config("ignore_quals", default = false)
   var nofw: Boolean = config("nofw", default = false)
   var norc: Boolean = config("norc", default = false)
-  var no_1mm_upfront: Boolean = config("no_1mm_upfront", default = false)
-  var end_to_end: Boolean = config("end_to_end", default = false)
+  var no1MmUpfront: Boolean = config("no_1mm_upfront", default = false)
+  var endToEnd: Boolean = config("end_to_end", default = false)
   var local: Boolean = config("local", default = false)
 
   /* Scoring */
@@ -67,7 +67,7 @@ class Bowtie2(val root: Configurable) extends BiopetCommandLineFunction with Ref
   var np: Option[Int] = config("np")
   var rdg: Option[String] = config("rdg")
   var rfg: Option[String] = config("rfg")
-  var score_min: Option[String] = config("score_min")
+  var scoreMin: Option[String] = config("score_min")
 
   /* Reporting */
   var k: Option[Int] = config("k")
@@ -83,52 +83,52 @@ class Bowtie2(val root: Configurable) extends BiopetCommandLineFunction with Ref
   var fr: Boolean = config("fr", default = false)
   var rf: Boolean = config("rf", default = false)
   var ff: Boolean = config("ff", default = false)
-  var no_mixed: Boolean = config("no_mixed", default = false)
-  var no_discordant: Boolean = config("no_discordant", default = false)
-  var no_dovetail: Boolean = config("no_dovetail", default = false)
-  var no_contain: Boolean = config("no_contain", default = false)
-  var no_overlap: Boolean = config("no_overlap", default = false)
+  var noMixed: Boolean = config("no_mixed", default = false)
+  var noDiscordant: Boolean = config("no_discordant", default = false)
+  var noDovetail: Boolean = config("no_dovetail", default = false)
+  var noContain: Boolean = config("no_contain", default = false)
+  var noOverlap: Boolean = config("no_overlap", default = false)
 
   /* Output */
   var time: Boolean = config("no_overlap", default = false)
 
   var un: Option[String] = config("un")
   var al: Option[String] = config("al")
-  var un_conc: Option[String] = config("un_conc")
-  var al_conc: Option[String] = config("al_conc")
+  var unConc: Option[String] = config("un_conc")
+  var alConc: Option[String] = config("al_conc")
 
-  var un_gz: Option[String] = config("un_gz")
-  var al_gz: Option[String] = config("al_gz")
-  var un_conc_gz: Option[String] = config("un_conc_gz")
-  var al_conc_gz: Option[String] = config("al_conc_gz")
+  var unGz: Option[String] = config("un_gz")
+  var alGz: Option[String] = config("al_gz")
+  var unConcGz: Option[String] = config("un_conc_gz")
+  var alConcGz: Option[String] = config("al_conc_gz")
 
-  var un_bz2: Option[String] = config("un_bz2")
-  var al_bz2: Option[String] = config("al_bz2")
-  var un_conc_bz2: Option[String] = config("un_conc_bz2")
-  var al_conc_bz2: Option[String] = config("al_conc_bz2")
+  var unBz2: Option[String] = config("un_bz2")
+  var alBz2: Option[String] = config("al_bz2")
+  var unConcBz2: Option[String] = config("un_conc_bz2")
+  var alConcBz2: Option[String] = config("al_conc_bz2")
 
   var quiet: Boolean = config("quiet", default = false)
-  var met_file: Option[String] = config("met_file")
-  var met_stderr: Boolean = config("met_stderr", default = false)
+  var metFile: Option[String] = config("met_file")
+  var metStderr: Boolean = config("met_stderr", default = false)
   var met: Option[Int] = config("met")
 
-  var no_unal: Boolean = config("no_unal", default = false)
-  var no_head: Boolean = config("no_head", default = false)
-  var no_sq: Boolean = config("no_sq", default = false)
+  var noUnal: Boolean = config("no_unal", default = false)
+  var noHead: Boolean = config("no_head", default = false)
+  var noSq: Boolean = config("no_sq", default = false)
 
-  var rg_id: Option[String] = config("rg_id")
+  var rgId: Option[String] = config("rg_id")
   var rg: List[String] = config("rg", default = Nil)
 
-  var omit_sec_seq: Boolean = config("omit_sec_seq", default = false)
+  var omitSecSeq: Boolean = config("omit_sec_seq", default = false)
 
   /* Performance */
   var reorder: Boolean = config("reorder", default = false)
   var mm: Boolean = config("mm", default = false)
 
   /* Other */
-  var qc_filter: Boolean = config("qc_filter", default = false)
+  var qcFilter: Boolean = config("qc_filter", default = false)
   var seed: Option[Int] = config("seed")
-  var non_deterministic: Boolean = config("non_deterministic", default = false)
+  var nonDeterministic: Boolean = config("non_deterministic", default = false)
 
   override def beforeGraph() {
     super.beforeGraph()
@@ -153,19 +153,19 @@ class Bowtie2(val root: Configurable) extends BiopetCommandLineFunction with Ref
     optional("--trim5", trim5) +
     conditional(phred33, "--phred33") +
     conditional(phred64, "--phred64") +
-    conditional(int_quals, "--int-quals") +
+    conditional(intQuals, "--int-quals") +
     /* Alignment options */
     optional("-N", N) +
     optional("-L", L) +
     optional("-i", i) +
-    optional("--n-ceil", n_ceil) +
+    optional("--n-ceil", nCeil) +
     optional("--dpad", dpad) +
     optional("--gbar", gbar) +
-    conditional(ignore_quals, "--ignore-quals") +
+    conditional(ignoreQuals, "--ignore-quals") +
     conditional(nofw, "--nofw") +
     conditional(norc, "--norc") +
-    conditional(no_1mm_upfront, "--no-1mm-upfront") +
-    conditional(end_to_end, "--end-to-end") +
+    conditional(no1MmUpfront, "--no-1mm-upfront") +
+    conditional(endToEnd, "--end-to-end") +
     conditional(local, "--local") +
     /* Scoring */
     optional("--ma", ma) +
@@ -173,7 +173,7 @@ class Bowtie2(val root: Configurable) extends BiopetCommandLineFunction with Ref
     optional("--np", np) +
     optional("--rdg", rdg) +
     optional("--rfg", rfg) +
-    optional("--score-min", score_min) +
+    optional("--score-min", scoreMin) +
     /* Reporting */
     optional("-k", k) +
     optional("--all", all) +
@@ -186,43 +186,43 @@ class Bowtie2(val root: Configurable) extends BiopetCommandLineFunction with Ref
     conditional(fr, "--fr") +
     conditional(rf, "--rf") +
     conditional(ff, "--ff") +
-    conditional(no_mixed, "--no-mixed") +
-    conditional(no_discordant, "--no-discordant") +
-    conditional(no_dovetail, "--no-dovetail") +
-    conditional(no_contain, "--no-contain") +
-    conditional(no_overlap, "--no-overlap") +
+    conditional(noMixed, "--no-mixed") +
+    conditional(noDiscordant, "--no-discordant") +
+    conditional(noDovetail, "--no-dovetail") +
+    conditional(noContain, "--no-contain") +
+    conditional(noOverlap, "--no-overlap") +
     /* Output */
     conditional(time, "--time") +
     optional("--un", un) +
     optional("--al", al) +
-    optional("--un-conc", un_conc) +
-    optional("--al-conc", al_conc) +
-    optional("--un-gz", un_gz) +
-    optional("--al-gz", al_gz) +
-    optional("--un-conc-gz", un_conc_gz) +
-    optional("--al-conc-gz", al_conc_gz) +
-    optional("--un-bz2", un_bz2) +
-    optional("--al-bz2", al_bz2) +
-    optional("--un-conc-bz2", un_conc_bz2) +
-    optional("--al-conc-bz2", al_conc_bz2) +
+    optional("--un-conc", unConc) +
+    optional("--al-conc", alConc) +
+    optional("--un-gz", unGz) +
+    optional("--al-gz", alGz) +
+    optional("--un-conc-gz", unConcGz) +
+    optional("--al-conc-gz", alConcGz) +
+    optional("--un-bz2", unBz2) +
+    optional("--al-bz2", alBz2) +
+    optional("--un-conc-bz2", unConcBz2) +
+    optional("--al-conc-bz2", alConcBz2) +
     conditional(quiet, "--quiet") +
-    optional("--met-file", met_file) +
-    conditional(met_stderr, "--met-stderr") +
+    optional("--met-file", metFile) +
+    conditional(metStderr, "--met-stderr") +
     optional("--met", met) +
-    conditional(no_unal, "--no-unal") +
-    conditional(no_head, "--no-head") +
-    conditional(no_sq, "--no-sq") +
-    optional("--rg-id", rg_id) +
+    conditional(noUnal, "--no-unal") +
+    conditional(noHead, "--no-head") +
+    conditional(noSq, "--no-sq") +
+    optional("--rg-id", rgId) +
     repeat("--rg", rg) +
-    conditional(omit_sec_seq, "--omit-sec-seq") +
+    conditional(omitSecSeq, "--omit-sec-seq") +
     /* Performance */
     optional("--threads", threads) +
     conditional(reorder, "--reorder") +
     conditional(mm, "--mm") +
     /* Other */
-    conditional(qc_filter, "--qc-filter") +
+    conditional(qcFilter, "--qc-filter") +
     optional("--seed", seed) +
-    conditional(non_deterministic, "--non-deterministic") +
+    conditional(nonDeterministic, "--non-deterministic") +
     /* Required */
     required("-x", bowtieIndex) +
     (R2 match {
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/breakdancer/BreakdancerConfig.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/breakdancer/BreakdancerConfig.scala
index 2b310aaf8c6b38933f4c11badedfbf7d57084bef..7986c837428b80132581438ae33636914e2b1c99 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/breakdancer/BreakdancerConfig.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/breakdancer/BreakdancerConfig.scala
@@ -30,26 +30,26 @@ class BreakdancerConfig(val root: Configurable) extends BiopetCommandLineFunctio
   @Output(doc = "Output File")
   var output: File = _
 
-  var min_mq: Option[Int] = config("min_mq", default = 20) // minimum of MQ to consider for taking read into histogram
-  var use_mq: Boolean = config("use_mq", default = false)
-  var min_insertsize: Option[Int] = config("min_insertsize")
-  var solid_data: Boolean = config("solid", default = false)
-  var sd_cutoff: Option[Int] = config("sd_cutoff") // Cutoff in unit of standard deviation [4]
+  var minMq: Option[Int] = config("min_mq", default = 20) // minimum of MQ to consider for taking read into histogram
+  var useMq: Boolean = config("use_mq", default = false)
+  var minInsertsize: Option[Int] = config("min_insertsize")
+  var solidData: Boolean = config("solid", default = false)
+  var sdCutoff: Option[Int] = config("sd_cutoff") // Cutoff in unit of standard deviation [4]
 
   // we set this to a higher number to avoid biases in small numbers in sorted bams
-  var min_observations: Option[Int] = config("min_observations") //  Number of observation required to estimate mean and s.d. insert size [10_000]
-  var coefvar_cutoff: Option[Int] = config("coef_cutoff") // Cutoff on coefficients of variation [1]
-  var histogram_bins: Option[Int] = config("histogram_bins") // Number of bins in the histogram [50]
+  var minObservations: Option[Int] = config("min_observations") //  Number of observation required to estimate mean and s.d. insert size [10_000]
+  var coefvarCutoff: Option[Int] = config("coef_cutoff") // Cutoff on coefficients of variation [1]
+  var histogramBins: Option[Int] = config("histogram_bins") // Number of bins in the histogram [50]
 
   def cmdLine = required(executable) +
-    optional("-q", min_mq) +
-    conditional(use_mq, "-m") +
-    optional("-s", min_insertsize) +
-    conditional(solid_data, "-s") +
-    optional("-c", sd_cutoff) +
-    optional("-n", min_observations) +
-    optional("-v", coefvar_cutoff) +
-    optional("-b", histogram_bins) +
+    optional("-q", minMq) +
+    conditional(useMq, "-m") +
+    optional("-s", minInsertsize) +
+    conditional(solidData, "-s") +
+    optional("-c", sdCutoff) +
+    optional("-n", minObservations) +
+    optional("-v", coefvarCutoff) +
+    optional("-b", histogramBins) +
     required(input) + " 1> " + required(output)
 }
 
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/conifer/Conifer.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/conifer/Conifer.scala
index 5f517d7b338f0236fe051edd320ea9e4d4775fdb..f241c576ee1fa2ff7b7169f5e7cee7abc265045e 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/conifer/Conifer.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/conifer/Conifer.scala
@@ -24,7 +24,7 @@ abstract class Conifer extends PythonCommandLineFunction with Version {
   setPythonScript(config("script", default = "conifer"))
   def versionRegex = """(.*)""".r
   override def versionExitcode = List(0)
-  def versionCommand = executable + " " + python_script + " --version"
+  def versionCommand = executable + " " + pythonScript + " --version"
 
   override def defaultCoreMemory = 5.0
   override def defaultThreads = 1
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/conifer/ConiferAnalyze.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/conifer/ConiferAnalyze.scala
index 284d0e059dea542f2550db22b086b4a4db9837da..40790f0ac3cad8ae4f4c5af309b5b078bbd99002 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/conifer/ConiferAnalyze.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/conifer/ConiferAnalyze.scala
@@ -36,7 +36,7 @@ class ConiferAnalyze(val root: Configurable) extends Conifer {
   var svd: Option[Int] = config("svd", default = 1)
 
   @Argument(doc = "Minimum population median RPKM per probe", required = false)
-  var min_rpkm: Option[Double] = config("min_rpkm")
+  var minRpkm: Option[Double] = config("min_rpkm")
 
   override def cmdLine = super.cmdLine +
     " analyze " +
@@ -44,5 +44,5 @@ class ConiferAnalyze(val root: Configurable) extends Conifer {
     " --rpkm_dir" + required(rpkmDir) +
     " --output" + required(output) +
     optional("--svd", svd) +
-    optional("--min_rpkm", min_rpkm)
+    optional("--min_rpkm", minRpkm)
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/gmap/Gsnap.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/gmap/Gsnap.scala
index 7b92be3b16946a6ae74dbe1537031a0ac864e534..d8059551cfd9504ce778150384fda67151500397 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/gmap/Gsnap.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/gmap/Gsnap.scala
@@ -53,7 +53,7 @@ class Gsnap(val root: Configurable) extends BiopetCommandLineFunction with Refer
   var db: String = config("db")
 
   /** whether to use a suffix array, which will give increased speed */
-  var use_sarray: Option[Int] = config("use_sarray")
+  var useSarray: Option[Int] = config("use_sarray")
 
   /** kmer size to use in genome database (allowed values: 16 or less) */
   var kmer: Option[Int] = config("kmer")
@@ -65,28 +65,28 @@ class Gsnap(val root: Configurable) extends BiopetCommandLineFunction with Refer
   var part: Option[String] = config("part")
 
   /** size of input buffer (program reads this many sequences at a time)*/
-  var input_buffer_size: Option[Int] = config("input_buffer_size")
+  var inputBufferSize: Option[Int] = config("input_buffer_size")
 
   /** amount of barcode to remove from start of read */
-  var barcode_length: Option[Int] = config("barcode_length")
+  var barcodeLength: Option[Int] = config("barcode_length")
 
   /** orientation of paired-end reads */
   var orientation: Option[String] = config("orientation")
 
   /** starting position of identifier in fastq header, space-delimited (>= 1) */
-  var fastq_id_start: Option[Int] = config("fastq_id_start")
+  var fastqIdStart: Option[Int] = config("fastq_id_start")
 
   /** ending position of identifier in fastq header, space-delimited (>= 1) */
-  var fastq_id_end: Option[Int] = config("fastq_id_end")
+  var fastqIdEnd: Option[Int] = config("fastq_id_end")
 
   /** when multiple fastq files are provided on the command line, gsnap assumes */
-  var force_single_end: Boolean = config("force_single_end", default = false)
+  var forceSingleEnd: Boolean = config("force_single_end", default = false)
 
   /** skips reads marked by the illumina chastity program.  expecting a string */
-  var filter_chastity: Option[String] = config("filter_chastity")
+  var filterChastity: Option[String] = config("filter_chastity")
 
   /** allows accession names of reads to mismatch in paired-end files */
-  var allow_pe_name_mismatch: Boolean = config("allow_pe_name_mismatch", default = false)
+  var allowPeNameMismatch: Boolean = config("allow_pe_name_mismatch", default = false)
 
   /** uncompress gzipped input files */
   var gunzip: Boolean = config("gunzip", default = false)
@@ -98,61 +98,61 @@ class Gsnap(val root: Configurable) extends BiopetCommandLineFunction with Refer
   var batch: Option[Int] = config("batch")
 
   /** whether to expand the genomic offsets index */
-  var expand_offsets: Option[Int] = config("expand_offsets")
+  var expandOffsets: Option[Int] = config("expand_offsets")
 
   /** maximum number of mismatches allowed (if not specified, then */
-  var max_mismatches: Option[Float] = config("max_mismatches")
+  var maxMismatches: Option[Float] = config("max_mismatches")
 
   /** whether to count unknown (n) characters in the query as a mismatch */
-  var query_unk_mismatch: Option[Int] = config("query_unk_mismatch")
+  var queryUnkMismatch: Option[Int] = config("query_unk_mismatch")
 
   /** whether to count unknown (n) characters in the genome as a mismatch */
-  var genome_unk_mismatch: Option[Int] = config("genome_unk_mismatch")
+  var genomeUnkMismatch: Option[Int] = config("genome_unk_mismatch")
 
   /** maximum number of alignments to find (default 1000) */
   var maxsearch: Option[Int] = config("maxsearch")
 
   /** threshold for computing a terminal alignment (from one end of the */
-  var terminal_threshold: Option[Int] = config("terminal_threshold")
+  var terminalThreshold: Option[Int] = config("terminal_threshold")
 
   /** threshold alignment length in bp for a terminal alignment result to be printed (in bp) */
-  var terminal_output_minlength: Option[Int] = config("terminal_output_minlength")
+  var terminalOutputMinlength: Option[Int] = config("terminal_output_minlength")
 
   /** penalty for an indel (default 2) */
-  var indel_penalty: Option[Int] = config("indel_penalty")
+  var indelPenalty: Option[Int] = config("indel_penalty")
 
   /** minimum length at end required for indel alignments (default 4) */
-  var indel_endlength: Option[Int] = config("indel_endlength")
+  var indelEndlength: Option[Int] = config("indel_endlength")
 
   /** maximum number of middle insertions allowed (default 9) */
-  var max_middle_insertions: Option[Int] = config("max_middle_insertions")
+  var maxMiddleInsertions: Option[Int] = config("max_middle_insertions")
 
   /** maximum number of middle deletions allowed (default 30) */
-  var max_middle_deletions: Option[Int] = config("max_middle_deletions")
+  var maxMiddleDeletions: Option[Int] = config("max_middle_deletions")
 
   /** maximum number of end insertions allowed (default 3) */
-  var max_end_insertions: Option[Int] = config("max_end_insertions")
+  var maxEndInsertions: Option[Int] = config("max_end_insertions")
 
   /** maximum number of end deletions allowed (default 6) */
-  var max_end_deletions: Option[Int] = config("max_end_deletions")
+  var maxEndDeletions: Option[Int] = config("max_end_deletions")
 
   /** report suboptimal hits beyond best hit (default 0) */
-  var suboptimal_levels: Option[Int] = config("suboptimal_levels")
+  var suboptimalLevels: Option[Int] = config("suboptimal_levels")
 
   /** method for removing adapters from reads.  currently allowed values: off, paired */
-  var adapter_strip: Option[String] = config("adapter_strip")
+  var adapterStrip: Option[String] = config("adapter_strip")
 
   /** score to use for mismatches when trimming at ends (default is -3; */
-  var trim_mismatch_score: Option[Int] = config("trim_mismatch_score")
+  var trimMismatchScore: Option[Int] = config("trim_mismatch_score")
 
   /** score to use for indels when trimming at ends (default is -4; */
-  var trim_indel_score: Option[Int] = config("trim_indel_score")
+  var trimIndelScore: Option[Int] = config("trim_indel_score")
 
   /** directory for snps index files (created using snpindex) (default is */
   var snpsdir: Option[String] = config("snpsdir")
 
   /** use database containing known snps (in <string>.iit, built */
-  var use_snps: Option[String] = config("use_snps")
+  var useSnps: Option[String] = config("use_snps")
 
   /** directory for methylcytosine index files (created using cmetindex) */
   var cmetdir: Option[String] = config("cmetdir")
@@ -167,166 +167,166 @@ class Gsnap(val root: Configurable) extends BiopetCommandLineFunction with Refer
   var tallydir: Option[String] = config("tallydir")
 
   /** use this tally iit file to resolve concordant multiple results */
-  var use_tally: Option[String] = config("use_tally")
+  var useTally: Option[String] = config("use_tally")
 
   /** directory for runlength iit file to resolve concordant multiple results (default is */
-  var runlengthdir: Option[String] = config("runlengthdir")
+  var runLengthDir: Option[String] = config("runlengthdir")
 
   /** use this runlength iit file to resolve concordant multiple results */
-  var use_runlength: Option[String] = config("use_runlength")
+  var useRunlength: Option[String] = config("use_runlength")
 
   /** cases to use gmap for complex alignments containing multiple splices or indels */
-  var gmap_mode: Option[String] = config("gmap_mode")
+  var gmapMode: Option[String] = config("gmap_mode")
 
   /** try gmap pairsearch on nearby genomic regions if best score (the total */
-  var trigger_score_for_gmap: Option[Int] = config("trigger_score_for_gmap")
+  var triggerScoreForGmap: Option[Int] = config("trigger_score_for_gmap")
 
   /** keep gmap hit only if it has this many consecutive matches (default 20) */
-  var gmap_min_match_length: Option[Int] = config("gmap_min_match_length")
+  var gmapMinMatchLength: Option[Int] = config("gmap_min_match_length")
 
   /** extra mismatch/indel score allowed for gmap alignments (default 3) */
-  var gmap_allowance: Option[Int] = config("gmap_allowance")
+  var gmapAllowance: Option[Int] = config("gmap_allowance")
 
   /** perform gmap pairsearch on nearby genomic regions up to this many */
-  var max_gmap_pairsearch: Option[Int] = config("max_gmap_pairsearch")
+  var maxGmapPairsearch: Option[Int] = config("max_gmap_pairsearch")
 
   /** perform gmap terminal on nearby genomic regions up to this many */
-  var max_gmap_terminal: Option[Int] = config("max_gmap_terminal")
+  var maxGmapTerminal: Option[Int] = config("max_gmap_terminal")
 
   /** perform gmap improvement on nearby genomic regions up to this many */
-  var max_gmap_improvement: Option[Int] = config("max_gmap_improvement")
+  var maxGmapImprovement: Option[Int] = config("max_gmap_improvement")
 
   /** allow microexons only if one of the splice site probabilities is */
-  var microexon_spliceprob: Option[Float] = config("microexon_spliceprob")
+  var microExonSpliceprob: Option[Float] = config("microexon_spliceprob")
 
   /** look for novel splicing (0=no (default), 1=yes) */
-  var novelsplicing: Option[Int] = config("novelsplicing")
+  var novelSplicing: Option[Int] = config("novelsplicing")
 
   /** directory for splicing involving known sites or known introns, */
-  var splicingdir: Option[String] = config("splicingdir")
+  var splicingDir: Option[String] = config("splicingdir")
 
   /** look for splicing involving known sites or known introns */
-  var use_splicing: Option[String] = config("use_splicing")
+  var useSplicing: Option[String] = config("use_splicing")
 
   /** for ambiguous known splicing at ends of the read, do not clip at the */
-  var ambig_splice_noclip: Boolean = config("ambig_splice_noclip", default = false)
+  var ambigSpliceNoclip: Boolean = config("ambig_splice_noclip", default = false)
 
   /** definition of local novel splicing event (default 200000) */
-  var localsplicedist: Option[Int] = config("localsplicedist")
+  var localSpliceDist: Option[Int] = config("localsplicedist")
 
   /** distance to look for novel splices at the ends of reads (default 50000) */
-  var novelend_splicedist: Option[Int] = config("novelend_splicedist")
+  var novelEndSplicedist: Option[Int] = config("novelend_splicedist")
 
   /** penalty for a local splice (default 0).  counts against mismatches allowed */
-  var local_splice_penalty: Option[Int] = config("local_splice_penalty")
+  var localSplicePenalty: Option[Int] = config("local_splice_penalty")
 
   /** penalty for a distant splice (default 1).  a distant splice is one where */
-  var distant_splice_penalty: Option[Int] = config("distant_splice_penalty")
+  var distantSplicePenalty: Option[Int] = config("distant_splice_penalty")
 
   /** minimum length at end required for distant spliced alignments (default 20, min */
-  var distant_splice_endlength: Option[Int] = config("distant_splice_endlength")
+  var distantSpliceEndlength: Option[Int] = config("distant_splice_endlength")
 
   /** minimum length at end required for short-end spliced alignments (default 2, */
-  var shortend_splice_endlength: Option[Int] = config("shortend_splice_endlength")
+  var shortendSpliceEndlength: Option[Int] = config("shortend_splice_endlength")
 
   /** minimum identity at end required for distant spliced alignments (default 0.95) */
-  var distant_splice_identity: Option[Float] = config("distant_splice_identity")
+  var distantSpliceIdentity: Option[Float] = config("distant_splice_identity")
 
   /** (not currently implemented) */
-  var antistranded_penalty: Option[Int] = config("antistranded_penalty")
+  var antiStrandedPenalty: Option[Int] = config("antistranded_penalty")
 
   /** report distant splices on the same chromosome as a single splice, if possible */
-  var merge_distant_samechr: Boolean = config("merge_distant_samechr", default = false)
+  var mergeDistantSamechr: Boolean = config("merge_distant_samechr", default = false)
 
   /** max total genomic length for dna-seq paired reads, or other reads */
-  var pairmax_dna: Option[Int] = config("pairmax_dna")
+  var pairmaxDna: Option[Int] = config("pairmax_dna")
 
   /** max total genomic length for rna-seq paired reads, or other reads */
-  var pairmax_rna: Option[Int] = config("pairmax_rna")
+  var pairmaxRna: Option[Int] = config("pairmax_rna")
 
   /** expected paired-end length, used for calling splices in medial part of */
-  var pairexpect: Option[Int] = config("pairexpect")
+  var pairExpect: Option[Int] = config("pairexpect")
 
   /** allowable deviation from expected paired-end length, used for */
-  var pairdev: Option[Int] = config("pairdev")
+  var pairDev: Option[Int] = config("pairdev")
 
   /** protocol for input quality scores.  allowed values: */
-  var quality_protocol: Option[String] = config("quality_protocol")
+  var qualityProtocol: Option[String] = config("quality_protocol")
 
   /** fastq quality scores are zero at this ascii value */
-  var quality_zero_score: Option[Int] = config("quality_zero_score")
+  var qualityZeroScore: Option[Int] = config("quality_zero_score")
 
   /** shift fastq quality scores by this amount in output */
-  var quality_print_shift: Option[Int] = config("quality_print_shift")
+  var qualityPrintShift: Option[Int] = config("quality_print_shift")
 
   /** maximum number of paths to print (default 100) */
   var npaths: Option[Int] = config("npaths")
 
   /** if more than maximum number of paths are found, */
-  var quiet_if_excessive: Boolean = config("quiet_if_excessive", default = false)
+  var quietIfExcessive: Boolean = config("quiet_if_excessive", default = false)
 
   /** print output in same order as input (relevant */
   var ordered: Boolean = config("ordered", default = false)
 
   /** for gsnap output in snp-tolerant alignment, shows all differences */
-  var show_refdiff: Boolean = config("show_refdiff", default = false)
+  var showRefdiff: Boolean = config("show_refdiff", default = false)
 
   /** for paired-end reads whose alignments overlap, clip the overlapping region */
-  var clip_overlap: Boolean = config("clip_overlap", default = false)
+  var clipOverlap: Boolean = config("clip_overlap", default = false)
 
   /** print detailed information about snps in reads (works only if -v also selected) */
-  var print_snps: Boolean = config("print_snps", default = false)
+  var printSnps: Boolean = config("print_snps", default = false)
 
   /** print only failed alignments, those with no results */
-  var failsonly: Boolean = config("failsonly", default = false)
+  var failsOnly: Boolean = config("failsonly", default = false)
 
   /** exclude printing of failed alignments */
-  var nofails: Boolean = config("nofails", default = false)
+  var noFails: Boolean = config("nofails", default = false)
 
   /** print completely failed alignments as input fasta or fastq format */
-  var fails_as_input: Boolean = config("fails_as_input", default = false)
+  var failsAsInput: Boolean = config("fails_as_input", default = false)
 
   /** another format type, other than default */
   var format: Option[String] = config("format")
 
   /** basename for multiple-file output, separately for nomapping, */
-  var split_output: Option[String] = config("split_output")
+  var splitOutput: Option[String] = config("split_output")
 
   /** when --split-output is given, this flag will append output to the */
-  var append_output: Boolean = config("append_output", default = false)
+  var appendOutput: Boolean = config("append_output", default = false)
 
   /** buffer size, in queries, for output thread (default 1000).  when the number */
-  var output_buffer_size: Option[Int] = config("output_buffer_size")
+  var outputBufferSize: Option[Int] = config("output_buffer_size")
 
   /** do not print headers beginning with '@' */
-  var no_sam_headers: Boolean = config("no_sam_headers", default = false)
+  var noSamHeaders: Boolean = config("no_sam_headers", default = false)
 
   /** print headers only for this batch, as specified by -q */
-  var sam_headers_batch: Option[Int] = config("sam_headers_batch")
+  var samHeadersBatch: Option[Int] = config("sam_headers_batch")
 
   /** insert 0m in cigar between adjacent insertions and deletions */
-  var sam_use_0M: Boolean = config("sam_use_0M", default = false)
+  var samUse0M: Boolean = config("sam_use_0M", default = false)
 
   /** allows multiple alignments to be marked as primary if they */
-  var sam_multiple_primaries: Boolean = config("sam_multiple_primaries", default = false)
+  var samMultiplePrimaries: Boolean = config("sam_multiple_primaries", default = false)
 
   /** for rna-seq alignments, disallows xs:a:? when the sense direction */
-  var force_xs_dir: Boolean = config("force_xs_dir", default = false)
+  var forceXsDir: Boolean = config("force_xs_dir", default = false)
 
   /** in md string, when known snps are given by the -v flag, */
-  var md_lowercase_snp: Boolean = config("md_lowercase_snp", default = false)
+  var mdLowercaseSnp: Boolean = config("md_lowercase_snp", default = false)
 
   /** value to put into read-group id (rg-id) field */
-  var read_group_id: Option[String] = config("read_group_id")
+  var readGroupId: Option[String] = config("read_group_id")
 
   /** value to put into read-group name (rg-sm) field */
-  var read_group_name: Option[String] = config("read_group_name")
+  var readGroupName: Option[String] = config("read_group_name")
 
   /** value to put into read-group library (rg-lb) field */
-  var read_group_library: Option[String] = config("read_group_library")
+  var readGroupLibrary: Option[String] = config("read_group_library")
 
   /** value to put into read-group library (rg-pl) field */
-  var read_group_platform: Option[String] = config("read_group_platform")
+  var readGroupPlatform: Option[String] = config("read_group_platform")
 
   def versionRegex = """.* version (.*)""".r
   def versionCommand = executable + " --version"
@@ -343,99 +343,99 @@ class Gsnap(val root: Configurable) extends BiopetCommandLineFunction with Refer
     required(executable) +
       optional("--dir", dir) +
       optional("--db", db) +
-      optional("--use-sarray", use_sarray) +
+      optional("--use-sarray", useSarray) +
       optional("--kmer", kmer) +
       optional("--sampling", sampling) +
       optional("--part", part) +
-      optional("--input-buffer-size", input_buffer_size) +
-      optional("--barcode-length", barcode_length) +
+      optional("--input-buffer-size", inputBufferSize) +
+      optional("--barcode-length", barcodeLength) +
       optional("--orientation", orientation) +
-      optional("--fastq-id-start", fastq_id_start) +
-      optional("--fastq-id-end", fastq_id_end) +
-      conditional(force_single_end, "--force-single-end") +
-      optional("--filter-chastity", filter_chastity) +
-      conditional(allow_pe_name_mismatch, "--allow-pe-name-mismatch") +
+      optional("--fastq-id-start", fastqIdStart) +
+      optional("--fastq-id-end", fastqIdEnd) +
+      conditional(forceSingleEnd, "--force-single-end") +
+      optional("--filter-chastity", filterChastity) +
+      conditional(allowPeNameMismatch, "--allow-pe-name-mismatch") +
       conditional(gunzip, "--gunzip") +
       conditional(bunzip2, "--bunzip2") +
       optional("--batch", batch) +
-      optional("--expand-offsets", expand_offsets) +
-      optional("--max-mismatches", max_mismatches) +
-      optional("--query-unk-mismatch", query_unk_mismatch) +
-      optional("--genome-unk-mismatch", genome_unk_mismatch) +
+      optional("--expand-offsets", expandOffsets) +
+      optional("--max-mismatches", maxMismatches) +
+      optional("--query-unk-mismatch", queryUnkMismatch) +
+      optional("--genome-unk-mismatch", genomeUnkMismatch) +
       optional("--maxsearch", maxsearch) +
-      optional("--terminal-threshold", terminal_threshold) +
-      optional("--terminal-output-minlength", terminal_output_minlength) +
-      optional("--indel-penalty", indel_penalty) +
-      optional("--indel-endlength", indel_endlength) +
-      optional("--max-middle-insertions", max_middle_insertions) +
-      optional("--max-middle-deletions", max_middle_deletions) +
-      optional("--max-end-insertions", max_end_insertions) +
-      optional("--max-end-deletions", max_end_deletions) +
-      optional("--suboptimal-levels", suboptimal_levels) +
-      optional("--adapter-strip", adapter_strip) +
-      optional("--trim-mismatch-score", trim_mismatch_score) +
-      optional("--trim-indel-score", trim_indel_score) +
+      optional("--terminal-threshold", terminalThreshold) +
+      optional("--terminal-output-minlength", terminalOutputMinlength) +
+      optional("--indel-penalty", indelPenalty) +
+      optional("--indel-endlength", indelEndlength) +
+      optional("--max-middle-insertions", maxMiddleInsertions) +
+      optional("--max-middle-deletions", maxMiddleDeletions) +
+      optional("--max-end-insertions", maxEndInsertions) +
+      optional("--max-end-deletions", maxEndDeletions) +
+      optional("--suboptimal-levels", suboptimalLevels) +
+      optional("--adapter-strip", adapterStrip) +
+      optional("--trim-mismatch-score", trimMismatchScore) +
+      optional("--trim-indel-score", trimIndelScore) +
       optional("--snpsdir", snpsdir) +
-      optional("--use-snps", use_snps) +
+      optional("--use-snps", useSnps) +
       optional("--cmetdir", cmetdir) +
       optional("--atoidir", atoidir) +
       optional("--mode", mode) +
       optional("--tallydir", tallydir) +
-      optional("--use-tally", use_tally) +
-      optional("--runlengthdir", runlengthdir) +
-      optional("--use-runlength", use_runlength) +
+      optional("--use-tally", useTally) +
+      optional("--runlengthdir", runLengthDir) +
+      optional("--use-runlength", useRunlength) +
       optional("--nthreads", threads) +
-      optional("--gmap-mode", gmap_mode) +
-      optional("--trigger-score-for-gmap", trigger_score_for_gmap) +
-      optional("--gmap-min-match-length", gmap_min_match_length) +
-      optional("--gmap-allowance", gmap_allowance) +
-      optional("--max-gmap-pairsearch", max_gmap_pairsearch) +
-      optional("--max-gmap-terminal", max_gmap_terminal) +
-      optional("--max-gmap-improvement", max_gmap_improvement) +
-      optional("--microexon-spliceprob", microexon_spliceprob) +
-      optional("--novelsplicing", novelsplicing) +
-      optional("--splicingdir", splicingdir) +
-      optional("--use-splicing", use_splicing) +
-      conditional(ambig_splice_noclip, "--ambig-splice-noclip") +
-      optional("--localsplicedist", localsplicedist) +
-      optional("--novelend-splicedist", novelend_splicedist) +
-      optional("--local-splice-penalty", local_splice_penalty) +
-      optional("--distant-splice-penalty", distant_splice_penalty) +
-      optional("--distant-splice-endlength", distant_splice_endlength) +
-      optional("--shortend-splice-endlength", shortend_splice_endlength) +
-      optional("--distant-splice-identity", distant_splice_identity) +
-      optional("--antistranded-penalty", antistranded_penalty) +
-      conditional(merge_distant_samechr, "--merge-distant-samechr") +
-      optional("--pairmax-dna", pairmax_dna) +
-      optional("--pairmax-rna", pairmax_rna) +
-      optional("--pairexpect", pairexpect) +
-      optional("--pairdev", pairdev) +
-      optional("--quality-protocol", quality_protocol) +
-      optional("--quality-zero-score", quality_zero_score) +
-      optional("--quality-print-shift", quality_print_shift) +
+      optional("--gmap-mode", gmapMode) +
+      optional("--trigger-score-for-gmap", triggerScoreForGmap) +
+      optional("--gmap-min-match-length", gmapMinMatchLength) +
+      optional("--gmap-allowance", gmapAllowance) +
+      optional("--max-gmap-pairsearch", maxGmapPairsearch) +
+      optional("--max-gmap-terminal", maxGmapTerminal) +
+      optional("--max-gmap-improvement", maxGmapImprovement) +
+      optional("--microexon-spliceprob", microExonSpliceprob) +
+      optional("--novelsplicing", novelSplicing) +
+      optional("--splicingdir", splicingDir) +
+      optional("--use-splicing", useSplicing) +
+      conditional(ambigSpliceNoclip, "--ambig-splice-noclip") +
+      optional("--localsplicedist", localSpliceDist) +
+      optional("--novelend-splicedist", novelEndSplicedist) +
+      optional("--local-splice-penalty", localSplicePenalty) +
+      optional("--distant-splice-penalty", distantSplicePenalty) +
+      optional("--distant-splice-endlength", distantSpliceEndlength) +
+      optional("--shortend-splice-endlength", shortendSpliceEndlength) +
+      optional("--distant-splice-identity", distantSpliceIdentity) +
+      optional("--antistranded-penalty", antiStrandedPenalty) +
+      conditional(mergeDistantSamechr, "--merge-distant-samechr") +
+      optional("--pairmax-dna", pairmaxDna) +
+      optional("--pairmax-rna", pairmaxRna) +
+      optional("--pairexpect", pairExpect) +
+      optional("--pairdev", pairDev) +
+      optional("--quality-protocol", qualityProtocol) +
+      optional("--quality-zero-score", qualityZeroScore) +
+      optional("--quality-print-shift", qualityPrintShift) +
       optional("--npaths", npaths) +
-      conditional(quiet_if_excessive, "--quiet-if-excessive") +
+      conditional(quietIfExcessive, "--quiet-if-excessive") +
       conditional(ordered, "--ordered") +
-      conditional(show_refdiff, "--show-refdiff") +
-      conditional(clip_overlap, "--clip-overlap") +
-      conditional(print_snps, "--print-snps") +
-      conditional(failsonly, "--failsonly") +
-      conditional(nofails, "--nofails") +
-      conditional(fails_as_input, "--fails-as-input") +
+      conditional(showRefdiff, "--show-refdiff") +
+      conditional(clipOverlap, "--clip-overlap") +
+      conditional(printSnps, "--print-snps") +
+      conditional(failsOnly, "--failsonly") +
+      conditional(noFails, "--nofails") +
+      conditional(failsAsInput, "--fails-as-input") +
       optional("--format", format) +
-      optional("--split-output", split_output) +
-      conditional(append_output, "--append-output") +
-      optional("--output-buffer-size", output_buffer_size) +
-      conditional(no_sam_headers, "--no-sam-headers") +
-      optional("--sam-headers-batch", sam_headers_batch) +
-      conditional(sam_use_0M, "--sam-use-0M") +
-      conditional(sam_multiple_primaries, "--sam-multiple-primaries") +
-      conditional(force_xs_dir, "--force-xs-dir") +
-      conditional(md_lowercase_snp, "--md-lowercase-snp") +
-      optional("--read-group-id", read_group_id) +
-      optional("--read-group-name", read_group_name) +
-      optional("--read-group-library", read_group_library) +
-      optional("--read-group-platform", read_group_platform) +
+      optional("--split-output", splitOutput) +
+      conditional(appendOutput, "--append-output") +
+      optional("--output-buffer-size", outputBufferSize) +
+      conditional(noSamHeaders, "--no-sam-headers") +
+      optional("--sam-headers-batch", samHeadersBatch) +
+      conditional(samUse0M, "--sam-use-0M") +
+      conditional(samMultiplePrimaries, "--sam-multiple-primaries") +
+      conditional(forceXsDir, "--force-xs-dir") +
+      conditional(mdLowercaseSnp, "--md-lowercase-snp") +
+      optional("--read-group-id", readGroupId) +
+      optional("--read-group-name", readGroupName) +
+      optional("--read-group-library", readGroupLibrary) +
+      optional("--read-group-platform", readGroupPlatform) +
       repeat(input) +
       " > " + required(output)
   }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/kraken/Kraken.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/kraken/Kraken.scala
index e4470ffec4f37c3b2c71a0b88f095cefb079741e..f8623c82ffd15079a319de3ffaced92aed55c4cf 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/kraken/Kraken.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/kraken/Kraken.scala
@@ -29,10 +29,10 @@ class Kraken(val root: Configurable) extends BiopetCommandLineFunction with Vers
   var input: List[File] = _
 
   @Output(doc = "Unidentified reads", required = false)
-  var unclassified_out: Option[File] = None
+  var unclassifiedOut: Option[File] = None
 
   @Output(doc = "Identified reads", required = false)
-  var classified_out: Option[File] = None
+  var classifiedOut: Option[File] = None
 
   @Output(doc = "Output with hits per sequence")
   var output: File = _
@@ -69,8 +69,8 @@ class Kraken(val root: Configurable) extends BiopetCommandLineFunction with Vers
     optional("--threads", nCoresRequest) +
     conditional(quick, "--quick") +
     optional("--min_hits", minHits) +
-    optional("--unclassified-out ", unclassified_out) +
-    optional("--classified-out ", classified_out) +
+    optional("--unclassified-out ", unclassifiedOut) +
+    optional("--classified-out ", classifiedOut) +
     required("--output", output) +
     conditional(preLoad, "--preload") +
     conditional(paired, "--paired") +
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/kraken/KrakenReport.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/kraken/KrakenReport.scala
index aa6e825bbed68c2724e98849019095d5d9ac71ec..00b29970de84add7b4825841c756e25a1adf2cd8 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/kraken/KrakenReport.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/kraken/KrakenReport.scala
@@ -39,7 +39,7 @@ class KrakenReport(val root: Configurable) extends BiopetCommandLineFunction wit
   }
 
   var db: File = config("db")
-  var show_zeros: Boolean = config("show_zeros", default = false)
+  var showZeros: Boolean = config("show_zeros", default = false)
 
   @Input(doc = "Input raw kraken analysis")
   var input: File = _
@@ -49,7 +49,7 @@ class KrakenReport(val root: Configurable) extends BiopetCommandLineFunction wit
 
   def cmdLine: String = required(executable) +
     required("--db", db) +
-    conditional(show_zeros, "--show-zeros") +
+    conditional(showZeros, "--show-zeros") +
     required(input) +
     " > " + required(output)
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/macs2/Macs2CallPeak.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/macs2/Macs2CallPeak.scala
index 10fb47713bff4ef480159bcfe4fe3eb787751d96..76a0c4ae56641f861f69fcd133b1e8c40f0f1906 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/macs2/Macs2CallPeak.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/macs2/Macs2CallPeak.scala
@@ -29,22 +29,22 @@ class Macs2CallPeak(val root: Configurable) extends Macs2 {
   var control: File = _
 
   @Output(doc = "Output file NARROWPEAKS")
-  private var output_narrow: File = _
+  private var outputNarrow: File = _
 
   @Output(doc = "Output file BROADPEAKS")
-  private var output_broad: File = _
+  private var outputBroad: File = _
 
   @Output(doc = "Output in Excel format")
-  private var output_xls: File = _
+  private var outputXls: File = _
 
   @Output(doc = "R script with Bimodal model")
-  private var output_r: File = _
+  private var outputR: File = _
 
   @Output(doc = "Output file Bedgraph")
-  private var output_bdg: File = _
+  private var outputBdg: File = _
 
   @Output(doc = "Output file gappedPeak")
-  private var output_gapped: File = _
+  private var outputGapped: File = _
 
   var fileformat: Option[String] = config("fileformat")
   var gsize: Option[Float] = config("gsize")
@@ -77,12 +77,12 @@ class Macs2CallPeak(val root: Configurable) extends Macs2 {
   override def beforeGraph(): Unit = {
     if (name.isEmpty) throw new IllegalArgumentException("Name is not defined")
     if (outputdir == null) throw new IllegalArgumentException("Outputdir is not defined")
-    output_narrow = new File(outputdir + name.get + ".narrowPeak")
-    output_broad = new File(outputdir + name.get + ".broadPeak")
-    output_xls = new File(outputdir + name.get + ".xls")
-    output_bdg = new File(outputdir + name.get + ".bdg")
-    output_r = new File(outputdir + name.get + ".r")
-    output_gapped = new File(outputdir + name.get + ".gappedPeak")
+    outputNarrow = new File(outputdir + name.get + ".narrowPeak")
+    outputBroad = new File(outputdir + name.get + ".broadPeak")
+    outputXls = new File(outputdir + name.get + ".xls")
+    outputBdg = new File(outputdir + name.get + ".bdg")
+    outputR = new File(outputdir + name.get + ".r")
+    outputGapped = new File(outputdir + name.get + ".gappedPeak")
   }
 
   /** Returns command to execute */
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/pindel/PindelCaller.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/pindel/PindelCaller.scala
index 5769a9763abe460440ad71c62c458ef451a4f986..a3b3fc84b412eab6e50f6dc2c166fb58c0ab5ec0 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/pindel/PindelCaller.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/pindel/PindelCaller.scala
@@ -116,12 +116,12 @@ class PindelCaller(val root: Configurable) extends BiopetCommandLineFunction wit
   var ploidy: Option[File] = config("ploidy")
   var detectDD: Boolean = config("detect_DD", default = false)
 
-  var MAX_DD_BREAKPOINT_DISTANCE: Option[Int] = config("MAX_DD_BREAKPOINT_DISTANCE")
-  var MAX_DISTANCE_CLUSTER_READS: Option[Int] = config("MAX_DISTANCE_CLUSTER_READS")
-  var MIN_DD_CLUSTER_SIZE: Option[Int] = config("MIN_DD_CLUSTER_SIZE")
-  var MIN_DD_BREAKPOINT_SUPPORT: Option[Int] = config("MIN_DD_BREAKPOINT_SUPPORT")
-  var MIN_DD_MAP_DISTANCE: Option[Int] = config("MIN_DD_MAP_DISTANCE")
-  var DD_REPORT_DUPLICATION_READS: Option[Int] = config("DD_REPORT_DUPLICATION_READS")
+  var maxDdBreakpointDistance: Option[Int] = config("max_dd_breakpoint_distance")
+  var maxDistanceClusterReads: Option[Int] = config("max_distance_cluster_reads")
+  var minDdClusterSize: Option[Int] = config("min_dd_cluster_size")
+  var minDdBreakpointSupport: Option[Int] = config("min_dd_breakpoint_support")
+  var minDdMapDistance: Option[Int] = config("min_dd_map_distance")
+  var ddReportDuplicationReads: Option[Int] = config("dd_report_duplication_reads")
 
   override def beforeGraph: Unit = {
     if (reference == null) reference = referenceFasta()
@@ -201,12 +201,12 @@ class PindelCaller(val root: Configurable) extends BiopetCommandLineFunction wit
     optional("--name_of_logfile", nameOfLogfile) +
     optional("--Ploidy", ploidy) +
     conditional(detectDD, "detect_DD") +
-    optional("--MAX_DD_BREAKPOINT_DISTANCE", MAX_DD_BREAKPOINT_DISTANCE) +
-    optional("--MAX_DISTANCE_CLUSTER_READS", MAX_DISTANCE_CLUSTER_READS) +
-    optional("--MIN_DD_CLUSTER_SIZE", MIN_DD_CLUSTER_SIZE) +
-    optional("--MIN_DD_BREAKPOINT_SUPPORT", MIN_DD_BREAKPOINT_SUPPORT) +
-    optional("--MIN_DD_MAP_DISTANCE", MIN_DD_MAP_DISTANCE) +
-    optional("--DD_REPORT_DUPLICATION_READS", DD_REPORT_DUPLICATION_READS)
+    optional("--MAX_DD_BREAKPOINT_DISTANCE", maxDdBreakpointDistance) +
+    optional("--MAX_DISTANCE_CLUSTER_READS", maxDistanceClusterReads) +
+    optional("--MIN_DD_CLUSTER_SIZE", minDdClusterSize) +
+    optional("--MIN_DD_BREAKPOINT_SUPPORT", minDdBreakpointSupport) +
+    optional("--MIN_DD_MAP_DISTANCE", minDdMapDistance) +
+    optional("--DD_REPORT_DUPLICATION_READS", ddReportDuplicationReads)
 }
 
 object PindelCaller {
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/AssignTaxonomy.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/AssignTaxonomy.scala
index b41dbfd0dcc524f4029da66a4b1d8a6724514399..8ed230df36a3e725615ee40464d50e02337c9c40 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/AssignTaxonomy.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/AssignTaxonomy.scala
@@ -16,38 +16,38 @@ class AssignTaxonomy(val root: Configurable) extends BiopetCommandLineFunction w
   var inputFasta: File = _
 
   @Input(required = false)
-  var read_1_seqs_fp: Option[File] = None
+  var read1SeqsFp: Option[File] = None
 
   @Input(required = false)
-  var read_2_seqs_fp: Option[File] = None
+  var read2SeqsFp: Option[File] = None
 
   @Input(required = false)
-  var id_to_taxonomy_fp: Option[File] = config("id_to_taxonomy_fp")
+  var idToTaxonomyFp: Option[File] = config("id_to_taxonomy_fp")
 
   @Input(required = false)
-  var reference_seqs_fp: Option[File] = config("reference_seqs_fp")
+  var referenceSeqsFp: Option[File] = config("reference_seqs_fp")
 
   @Input(required = false)
-  var training_data_properties_fp: Option[File] = config("training_data_properties_fp")
+  var trainingDataPropertiesFp: Option[File] = config("training_data_properties_fp")
 
-  var single_ok: Boolean = config("single_ok", default = false)
-  var no_single_ok_generic: Boolean = config("no_single_ok_generic", default = false)
+  var singleOk: Boolean = config("single_ok", default = false)
+  var noSingleOkGeneric: Boolean = config("no_single_ok_generic", default = false)
 
-  var amplicon_id_regex: Option[String] = config("amplicon_id_regex")
-  var header_id_regex: Option[String] = config("header_id_regex")
-  var assignment_method: Option[String] = config("assignment_method")
-  var sortmerna_db: Option[String] = config("sortmerna_db")
-  var sortmerna_e_value: Option[String] = config("sortmerna_e_value")
-  var sortmerna_coverage: Option[String] = config("sortmerna_coverage")
-  var sortmerna_best_N_alignments: Option[String] = config("sortmerna_best_N_alignments")
-  var sortmerna_threads: Option[String] = config("sortmerna_threads")
-  var blast_db: Option[String] = config("blast_db")
+  var ampliconIdRegex: Option[String] = config("amplicon_id_regex")
+  var headerIdRegex: Option[String] = config("header_id_regex")
+  var assignmentMethod: Option[String] = config("assignment_method")
+  var sortmernaDb: Option[String] = config("sortmerna_db")
+  var sortmernaEValue: Option[String] = config("sortmerna_e_value")
+  var sortmernaCoverage: Option[String] = config("sortmerna_coverage")
+  var sortmernaBestNAlignments: Option[String] = config("sortmerna_best_N_alignments")
+  var sortmernaThreads: Option[String] = config("sortmerna_threads")
+  var blastDb: Option[String] = config("blast_db")
   var confidence: Option[String] = config("confidence")
-  var min_consensus_fraction: Option[String] = config("min_consensus_fraction")
+  var minConsensusFraction: Option[String] = config("min_consensus_fraction")
   var similarity: Option[String] = config("similarity")
-  var uclust_max_accepts: Option[String] = config("uclust_max_accepts")
-  var rdp_max_memory: Option[String] = config("rdp_max_memory")
-  var blast_e_value: Option[String] = config("blast_e_value")
+  var uclustMaxAccepts: Option[String] = config("uclust_max_accepts")
+  var rdpMaxMemory: Option[String] = config("rdp_max_memory")
+  var blastEValue: Option[String] = config("blast_e_value")
   var outputDir: File = _
 
   def versionCommand = executable + " --version"
@@ -61,27 +61,27 @@ class AssignTaxonomy(val root: Configurable) extends BiopetCommandLineFunction w
 
   def cmdLine = executable +
     required("-i", inputFasta) +
-    optional("--read_1_seqs_fp", read_1_seqs_fp) +
-    optional("--read_2_seqs_fp", read_2_seqs_fp) +
-    optional("-t", id_to_taxonomy_fp) +
-    optional("-r", reference_seqs_fp) +
-    optional("-p", training_data_properties_fp) +
-    optional("--amplicon_id_regex", amplicon_id_regex) +
-    optional("--header_id_regex", header_id_regex) +
-    optional("--assignment_method", assignment_method) +
-    optional("--sortmerna_db", sortmerna_db) +
-    optional("--sortmerna_e_value", sortmerna_e_value) +
-    optional("--sortmerna_coverage", sortmerna_coverage) +
-    optional("--sortmerna_best_N_alignments", sortmerna_best_N_alignments) +
-    optional("--sortmerna_threads", sortmerna_threads) +
-    optional("--blast_db", blast_db) +
+    optional("--read_1_seqs_fp", read1SeqsFp) +
+    optional("--read_2_seqs_fp", read2SeqsFp) +
+    optional("-t", idToTaxonomyFp) +
+    optional("-r", referenceSeqsFp) +
+    optional("-p", trainingDataPropertiesFp) +
+    optional("--amplicon_id_regex", ampliconIdRegex) +
+    optional("--header_id_regex", headerIdRegex) +
+    optional("--assignment_method", assignmentMethod) +
+    optional("--sortmerna_db", sortmernaDb) +
+    optional("--sortmerna_e_value", sortmernaEValue) +
+    optional("--sortmerna_coverage", sortmernaCoverage) +
+    optional("--sortmerna_best_N_alignments", sortmernaBestNAlignments) +
+    optional("--sortmerna_threads", sortmernaThreads) +
+    optional("--blast_db", blastDb) +
     optional("--confidence", confidence) +
-    optional("--min_consensus_fraction", min_consensus_fraction) +
+    optional("--min_consensus_fraction", minConsensusFraction) +
     optional("--similarity", similarity) +
-    optional("--uclust_max_accepts", uclust_max_accepts) +
-    optional("--rdp_max_memory", rdp_max_memory) +
-    optional("--blast_e_value", blast_e_value) +
+    optional("--uclust_max_accepts", uclustMaxAccepts) +
+    optional("--rdp_max_memory", rdpMaxMemory) +
+    optional("--blast_e_value", blastEValue) +
     required("--output_dir", outputDir) +
-    conditional(single_ok, "--single_ok") +
-    conditional(no_single_ok_generic, "--no_single_ok_generic")
+    conditional(singleOk, "--single_ok") +
+    conditional(noSingleOkGeneric, "--no_single_ok_generic")
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/MergeOtuMaps.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/MergeOtuMaps.scala
index a538e97a1c281256b007ee37e115da48bfb0e393..1a3d61b30a2c47334488fc8f647fd9c843506b61 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/MergeOtuMaps.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/MergeOtuMaps.scala
@@ -21,7 +21,7 @@ class MergeOtuMaps(val root: Configurable) extends BiopetCommandLineFunction wit
   @Output(required = true)
   var outputFile: File = _
 
-  var failures_fp: Option[File] = None
+  var failuresFp: Option[File] = None
 
   override def beforeGraph(): Unit = {
     super.beforeGraph()
@@ -35,5 +35,5 @@ class MergeOtuMaps(val root: Configurable) extends BiopetCommandLineFunction wit
       case _                        => ""
     }) +
     required("-o", outputFile) +
-    optional("--failures_fp", failures_fp)
+    optional("--failures_fp", failuresFp)
 }
\ No newline at end of file
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickClosedReferenceOtus.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickClosedReferenceOtus.scala
index 265a6d21f941bfae7bc3c6b7c742993b9638e6c3..d0ddad3afa35e043670cf71e6050ee8224e2d8ae 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickClosedReferenceOtus.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickClosedReferenceOtus.scala
@@ -23,18 +23,18 @@ class PickClosedReferenceOtus(val root: Configurable) extends BiopetCommandLineF
   def versionRegex = """Version: (.*)""".r
 
   @Input(required = false)
-  var parameter_fp: Option[File] = config("parameter_fp")
+  var parameterFp: Option[File] = config("parameter_fp")
 
   @Input(required = false)
-  var reference_fp: Option[File] = config("reference_fp")
+  var referenceFp: Option[File] = config("reference_fp")
 
   @Input(required = false)
-  var taxonomy_fp: Option[File] = config("taxonomy_fp")
+  var taxonomyFp: Option[File] = config("taxonomy_fp")
 
-  var assign_taxonomy: Boolean = config("assign_taxonomy", default = false)
+  var assignTaxonomy: Boolean = config("assign_taxonomy", default = false)
   var force: Boolean = config("force", default = false)
-  var print_only: Boolean = config("print_only", default = false)
-  var suppress_taxonomy_assignment: Boolean = config("suppress_taxonomy_assignment", default = false)
+  var printOnly: Boolean = config("print_only", default = false)
+  var suppressTaxonomyAssignment: Boolean = config("suppress_taxonomy_assignment", default = false)
 
   def otuTable = new File(outputDir, "otu_table.biom")
   def otuMap = new File(outputDir, "uclust_ref_picked_otus" + File.separator + "seqs_otus.txt")
@@ -49,13 +49,13 @@ class PickClosedReferenceOtus(val root: Configurable) extends BiopetCommandLineF
   def cmdLine = executable + required("-f") +
     required("-i", inputFasta) +
     required("-o", outputDir) +
-    optional("--reference_fp", reference_fp) +
-    optional("--parameter_fp", parameter_fp) +
-    optional("--taxonomy_fp", taxonomy_fp) +
-    conditional(assign_taxonomy, "--assign_taxonomy") +
+    optional("--reference_fp", referenceFp) +
+    optional("--parameter_fp", parameterFp) +
+    optional("--taxonomy_fp", taxonomyFp) +
+    conditional(assignTaxonomy, "--assign_taxonomy") +
     conditional(force, "--force") +
-    conditional(print_only, "--print_only") +
-    conditional(suppress_taxonomy_assignment, "--suppress_taxonomy_assignment") +
+    conditional(printOnly, "--print_only") +
+    conditional(suppressTaxonomyAssignment, "--suppress_taxonomy_assignment") +
     (if (threads > 1) required("-a") + required("-O", threads) else "")
 
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickOtus.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickOtus.scala
index f83c59aa9dc61ad74297e707f2a1a6452780b9a2..47c7fde0abd298559e76d898443cbd7512ac1992 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickOtus.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickOtus.scala
@@ -22,59 +22,59 @@ class PickOtus(val root: Configurable) extends BiopetCommandLineFunction with Ve
   def versionCommand = executable + " --version"
   def versionRegex = """Version: (.*)""".r
 
-  var otu_picking_method: Option[String] = config("otu_picking_method")
-  var clustering_algorithm: Option[String] = config("clustering_algorithm")
-  var max_cdhit_memory: Option[Int] = config("max_cdhit_memory")
-  var refseqs_fp: Option[String] = config("refseqs_fp")
-  var blast_db: Option[String] = config("blast_db")
-  var max_e_value_blast: Option[String] = config("max_e_value_blast")
-  var sortmerna_db: Option[String] = config("sortmerna_db")
-  var sortmerna_e_value: Option[Double] = config("sortmerna_e_value")
-  var sortmerna_coverage: Option[Double] = config("sortmerna_coverage")
-  var sortmerna_tabular: Boolean = config("sortmerna_tabular", default = false)
-  var sortmerna_best_N_alignments: Option[Int] = config("sortmerna_best_N_alignments")
-  var sortmerna_max_pos: Option[Int] = config("sortmerna_max_pos")
-  var min_aligned_percent: Option[Double] = config("min_aligned_percent")
+  var otuPickingMethod: Option[String] = config("otu_picking_method")
+  var clusteringAlgorithm: Option[String] = config("clustering_algorithm")
+  var maxCdhitMemory: Option[Int] = config("max_cdhit_memory")
+  var refseqsFp: Option[String] = config("refseqs_fp")
+  var blastDb: Option[String] = config("blast_db")
+  var maxEValueBlast: Option[String] = config("max_e_value_blast")
+  var sortmernaDb: Option[String] = config("sortmerna_db")
+  var sortmernaEValue: Option[Double] = config("sortmerna_e_value")
+  var sortmernaCoverage: Option[Double] = config("sortmerna_coverage")
+  var sortmernaTabular: Boolean = config("sortmerna_tabular", default = false)
+  var sortmernaBestNAlignments: Option[Int] = config("sortmerna_best_N_alignments")
+  var sortmernaMaxPos: Option[Int] = config("sortmerna_max_pos")
+  var minAlignedPercent: Option[Double] = config("min_aligned_percent")
   var similarity: Option[Double] = config("similarity")
-  var sumaclust_exact: Option[String] = config("sumaclust_exact")
-  var sumaclust_l: Boolean = config("sumaclust_l", default = false)
-  var denovo_otu_id_prefix: Option[String] = config("denovo_otu_id_prefix")
-  var swarm_resolution: Option[String] = config("swarm_resolution")
-  var trie_reverse_seqs: Boolean = config("trie_reverse_seqs", default = false)
-  var prefix_prefilter_length: Option[String] = config("prefix_prefilter_length")
-  var trie_prefilter: Option[String] = config("trie_prefilter")
-  var prefix_length: Option[String] = config("prefix_length")
-  var suffix_length: Option[String] = config("suffix_length")
-  var enable_rev_strand_match: Boolean = config("enable_rev_strand_match", default = false)
-  var suppress_presort_by_abundance_uclust: Boolean = config("suppress_presort_by_abundance_uclust", default = false)
-  var optimal_uclust: Boolean = config("optimal_uclust", default = false)
-  var exact_uclust: Boolean = config("exact_uclust", default = false)
-  var user_sort: Boolean = config("user_sort", default = false)
-  var suppress_new_clusters: Boolean = config("suppress_new_clusters", default = false)
-  var max_accepts: Option[String] = config("max_accepts")
-  var max_rejects: Option[String] = config("max_rejects")
+  var sumaclustExact: Option[String] = config("sumaclust_exact")
+  var sumaclustL: Boolean = config("sumaclust_l", default = false)
+  var denovoOtuIdPrefix: Option[String] = config("denovo_otu_id_prefix")
+  var swarmResolution: Option[String] = config("swarm_resolution")
+  var trieReverseSeqs: Boolean = config("trie_reverse_seqs", default = false)
+  var prefixPrefilterLength: Option[String] = config("prefix_prefilter_length")
+  var triePrefilter: Option[String] = config("trie_prefilter")
+  var prefixLength: Option[String] = config("prefix_length")
+  var suffixLength: Option[String] = config("suffix_length")
+  var enableRevStrandMatch: Boolean = config("enable_rev_strand_match", default = false)
+  var suppressPresortByAbundanceUclust: Boolean = config("suppress_presort_by_abundance_uclust", default = false)
+  var optimalUclust: Boolean = config("optimal_uclust", default = false)
+  var exactUclust: Boolean = config("exact_uclust", default = false)
+  var userSort: Boolean = config("user_sort", default = false)
+  var suppressNewClusters: Boolean = config("suppress_new_clusters", default = false)
+  var maxAccepts: Option[String] = config("max_accepts")
+  var maxRejects: Option[String] = config("max_rejects")
   var stepwords: Option[String] = config("stepwords")
-  var word_length: Option[String] = config("word_length")
-  var suppress_uclust_stable_sort: Boolean = config("suppress_uclust_stable_sort", default = false)
-  var suppress_prefilter_exact_match: Boolean = config("suppress_prefilter_exact_match", default = false)
-  var save_uc_files: Boolean = config("save_uc_files", default = false)
-  var percent_id_err: Option[String] = config("percent_id_err")
-  var minsize: Option[String] = config("minsize")
-  var abundance_skew: Option[String] = config("abundance_skew")
-  var db_filepath: Option[String] = config("db_filepath")
-  var perc_id_blast: Option[String] = config("perc_id_blast")
-  var de_novo_chimera_detection: Boolean = config("de_novo_chimera_detection", default = false)
-  var suppress_de_novo_chimera_detection: Boolean = config("suppress_de_novo_chimera_detection", default = false)
-  var reference_chimera_detection: Option[String] = config("reference_chimera_detection")
-  var suppress_reference_chimera_detection: Option[String] = config("suppress_reference_chimera_detection")
-  var cluster_size_filtering: Option[String] = config("cluster_size_filtering")
-  var suppress_cluster_size_filtering: Option[String] = config("suppress_cluster_size_filtering")
-  var remove_usearch_logs: Boolean = config("remove_usearch_logs", default = false)
-  var derep_fullseq: Boolean = config("derep_fullseq", default = false)
-  var non_chimeras_retention: Option[String] = config("non_chimeras_retention")
+  var wordLength: Option[String] = config("word_length")
+  var suppressUclustStableSort: Boolean = config("suppress_uclust_stable_sort", default = false)
+  var suppressPrefilterExactMatch: Boolean = config("suppress_prefilter_exact_match", default = false)
+  var saveUcFiles: Boolean = config("save_uc_files", default = false)
+  var percentIdErr: Option[String] = config("percent_id_err")
+  var minSize: Option[String] = config("minsize")
+  var abundanceSkew: Option[String] = config("abundance_skew")
+  var dbFilepath: Option[String] = config("db_filepath")
+  var percIdBlast: Option[String] = config("perc_id_blast")
+  var deNovoChimeraDetection: Boolean = config("de_novo_chimera_detection", default = false)
+  var suppressDeNovoChimeraDetection: Boolean = config("suppress_de_novo_chimera_detection", default = false)
+  var referenceChimeraDetection: Option[String] = config("reference_chimera_detection")
+  var suppressReferenceChimeraDetection: Option[String] = config("suppress_reference_chimera_detection")
+  var clusterSizeFiltering: Option[String] = config("cluster_size_filtering")
+  var suppressClusterSizeFiltering: Option[String] = config("suppress_cluster_size_filtering")
+  var removeUsearchLogs: Boolean = config("remove_usearch_logs", default = false)
+  var derepFullseq: Boolean = config("derep_fullseq", default = false)
+  var nonChimerasRetention: Option[String] = config("non_chimeras_retention")
   var minlen: Option[String] = config("minlen")
-  var usearch_fast_cluster: Boolean = config("usearch_fast_cluster", default = false)
-  var usearch61_sort_method: Option[String] = config("usearch61_sort_method")
+  var usearchFastCluster: Boolean = config("usearch_fast_cluster", default = false)
+  var usearch61SortMethod: Option[String] = config("usearch61_sort_method")
   var sizeorder: Boolean = config("sizeorder", default = false)
 
   private lazy val name = inputFasta.getName.stripSuffix(".fasta").stripSuffix(".fa").stripSuffix(".fna")
@@ -93,59 +93,59 @@ class PickOtus(val root: Configurable) extends BiopetCommandLineFunction with Ve
   def cmdLine = executable +
     required("-i", inputFasta) +
     required("-o", outputDir) +
-    optional("-m", otu_picking_method) +
-    optional("-c", clustering_algorithm) +
-    optional("-M", max_cdhit_memory) +
-    optional("-r", refseqs_fp) +
-    optional("-b", blast_db) +
-    optional("-e", max_e_value_blast) +
-    optional("--sortmerna_db", sortmerna_db) +
-    optional("--sortmerna_e_value", sortmerna_e_value) +
-    optional("--sortmerna_coverage", sortmerna_coverage) +
-    conditional(sortmerna_tabular, "--sortmerna_tabular") +
-    optional("--sortmerna_best_N_alignments", sortmerna_best_N_alignments) +
-    optional("--sortmerna_max_pos", sortmerna_max_pos) +
-    optional("--min_aligned_percent", min_aligned_percent) +
+    optional("-m", otuPickingMethod) +
+    optional("-c", clusteringAlgorithm) +
+    optional("-M", maxCdhitMemory) +
+    optional("-r", refseqsFp) +
+    optional("-b", blastDb) +
+    optional("-e", maxEValueBlast) +
+    optional("--sortmerna_db", sortmernaDb) +
+    optional("--sortmerna_e_value", sortmernaEValue) +
+    optional("--sortmerna_coverage", sortmernaCoverage) +
+    conditional(sortmernaTabular, "--sortmerna_tabular") +
+    optional("--sortmerna_best_N_alignments", sortmernaBestNAlignments) +
+    optional("--sortmerna_max_pos", sortmernaMaxPos) +
+    optional("--min_aligned_percent", minAlignedPercent) +
     optional("--similarity", similarity) +
-    optional("--sumaclust_exact", sumaclust_exact) +
-    conditional(sumaclust_l, "--sumaclust_l") +
-    optional("--denovo_otu_id_prefix", denovo_otu_id_prefix) +
-    optional("--swarm_resolution", swarm_resolution) +
-    conditional(trie_reverse_seqs, "--trie_reverse_seqs") +
-    optional("--prefix_prefilter_length", prefix_prefilter_length) +
-    optional("--trie_prefilter", trie_prefilter) +
-    optional("--prefix_length", prefix_length) +
-    optional("--suffix_length", suffix_length) +
-    conditional(enable_rev_strand_match, "--enable_rev_strand_match") +
-    conditional(suppress_presort_by_abundance_uclust, "--suppress_presort_by_abundance_uclust") +
-    conditional(optimal_uclust, "--optimal_uclust") +
-    conditional(exact_uclust, "--exact_uclust") +
-    conditional(user_sort, "--user_sort") +
-    conditional(suppress_new_clusters, "--suppress_new_clusters") +
-    optional("--max_accepts", max_accepts) +
-    optional("--max_rejects", max_rejects) +
+    optional("--sumaclust_exact", sumaclustExact) +
+    conditional(sumaclustL, "--sumaclust_l") +
+    optional("--denovo_otu_id_prefix", denovoOtuIdPrefix) +
+    optional("--swarm_resolution", swarmResolution) +
+    conditional(trieReverseSeqs, "--trie_reverse_seqs") +
+    optional("--prefix_prefilter_length", prefixPrefilterLength) +
+    optional("--trie_prefilter", triePrefilter) +
+    optional("--prefix_length", prefixLength) +
+    optional("--suffix_length", suffixLength) +
+    conditional(enableRevStrandMatch, "--enable_rev_strand_match") +
+    conditional(suppressPresortByAbundanceUclust, "--suppress_presort_by_abundance_uclust") +
+    conditional(optimalUclust, "--optimal_uclust") +
+    conditional(exactUclust, "--exact_uclust") +
+    conditional(userSort, "--user_sort") +
+    conditional(suppressNewClusters, "--suppress_new_clusters") +
+    optional("--max_accepts", maxAccepts) +
+    optional("--max_rejects", maxRejects) +
     optional("--stepwords", stepwords) +
-    optional("--word_length", word_length) +
-    conditional(suppress_uclust_stable_sort, "--suppress_uclust_stable_sort") +
-    conditional(suppress_prefilter_exact_match, "--suppress_prefilter_exact_match") +
-    conditional(save_uc_files, "--save_uc_files") +
-    optional("--percent_id_err", percent_id_err) +
-    optional("--minsize", minsize) +
-    optional("--abundance_skew", abundance_skew) +
-    optional("--db_filepath", db_filepath) +
-    optional("--perc_id_blast", perc_id_blast) +
-    conditional(de_novo_chimera_detection, "--de_novo_chimera_detection") +
-    conditional(suppress_de_novo_chimera_detection, "--suppress_de_novo_chimera_detection") +
-    optional("--reference_chimera_detection", reference_chimera_detection) +
-    optional("--suppress_reference_chimera_detection", suppress_reference_chimera_detection) +
-    optional("--cluster_size_filtering", cluster_size_filtering) +
-    optional("--suppress_cluster_size_filtering", suppress_cluster_size_filtering) +
-    conditional(remove_usearch_logs, "--remove_usearch_logs") +
-    conditional(derep_fullseq, "--derep_fullseq") +
-    optional("--non_chimeras_retention", non_chimeras_retention) +
+    optional("--word_length", wordLength) +
+    conditional(suppressUclustStableSort, "--suppress_uclust_stable_sort") +
+    conditional(suppressPrefilterExactMatch, "--suppress_prefilter_exact_match") +
+    conditional(saveUcFiles, "--save_uc_files") +
+    optional("--percent_id_err", percentIdErr) +
+    optional("--minsize", minSize) +
+    optional("--abundance_skew", abundanceSkew) +
+    optional("--db_filepath", dbFilepath) +
+    optional("--perc_id_blast", percIdBlast) +
+    conditional(deNovoChimeraDetection, "--de_novo_chimera_detection") +
+    conditional(suppressDeNovoChimeraDetection, "--suppress_de_novo_chimera_detection") +
+    optional("--reference_chimera_detection", referenceChimeraDetection) +
+    optional("--suppress_reference_chimera_detection", suppressReferenceChimeraDetection) +
+    optional("--cluster_size_filtering", clusterSizeFiltering) +
+    optional("--suppress_cluster_size_filtering", suppressClusterSizeFiltering) +
+    conditional(removeUsearchLogs, "--remove_usearch_logs") +
+    conditional(derepFullseq, "--derep_fullseq") +
+    optional("--non_chimeras_retention", nonChimerasRetention) +
     optional("--minlen", minlen) +
-    conditional(usearch_fast_cluster, "--usearch_fast_cluster") +
-    optional("--usearch61_sort_method", usearch61_sort_method) +
+    conditional(usearchFastCluster, "--usearch_fast_cluster") +
+    optional("--usearch61_sort_method", usearch61SortMethod) +
     conditional(sizeorder, "--sizeorder") +
     optional("--threads", threads)
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickRepSet.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickRepSet.scala
index 5496c673cf515df6d735c406ed46cb145d49f389..a29f821097a1c529ba930eb8d2fdd4f6f3db646a 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickRepSet.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/PickRepSet.scala
@@ -22,7 +22,7 @@ class PickRepSet(val root: Configurable) extends BiopetCommandLineFunction with
   var logFile: Option[File] = None
 
   @Input(required = false)
-  var reference_seqs_fp: Option[File] = config("reference_seqs_fp")
+  var referenceSeqsFp: Option[File] = config("reference_seqs_fp")
 
   @Input(required = false)
   var fastaInput: Option[File] = None
@@ -32,14 +32,14 @@ class PickRepSet(val root: Configurable) extends BiopetCommandLineFunction with
   def versionCommand = executable + " --version"
   def versionRegex = """Version: (.*)""".r
 
-  var rep_set_picking_method: Option[String] = config("rep_set_picking_method")
+  var repSetPickingMethod: Option[String] = config("rep_set_picking_method")
 
   def cmdLine = executable +
     required("-i", inputFile) +
     required("-o", outputFasta) +
-    optional("-m", rep_set_picking_method) +
+    optional("-m", repSetPickingMethod) +
     optional("-f", fastaInput) +
     optional("-l", logFile) +
     optional("-s", sortBy) +
-    optional("-r", reference_seqs_fp)
+    optional("-r", referenceSeqsFp)
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/SplitLibrariesFastq.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/SplitLibrariesFastq.scala
index 25db2dd9ae2347e4439ac482627d547f6a54e310..6b02239a78e598edfeec772faf385b8c094a9685 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/SplitLibrariesFastq.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/qiime/SplitLibrariesFastq.scala
@@ -22,22 +22,22 @@ class SplitLibrariesFastq(val root: Configurable) extends BiopetCommandLineFunct
   var v: Option[String] = config("v")
   var m: Option[String] = config("m")
   var b: Option[String] = config("b")
-  var store_qual_scores: Boolean = config("store_qual_scores", default = false)
-  var sample_ids: List[String] = Nil
-  var store_demultiplexed_fastq: Boolean = config("store_demultiplexed_fastq", default = false)
-  var retain_unassigned_reads: Boolean = config("retain_unassigned_reads", default = false)
+  var storeQualScores: Boolean = config("store_qual_scores", default = false)
+  var sampleIds: List[String] = Nil
+  var storeDemultiplexedFastq: Boolean = config("store_demultiplexed_fastq", default = false)
+  var retainUnassignedReads: Boolean = config("retain_unassigned_reads", default = false)
   var r: Option[Int] = config("r")
   var p: Option[Double] = config("p")
   var n: Option[Int] = config("n")
   var s: Option[Int] = config("s")
-  var rev_comp_barcode: Boolean = config("rev_comp_barcode", default = false)
-  var rev_comp_mapping_barcodes: Boolean = config("rev_comp_mapping_barcodes", default = false)
-  var rev_comp: Boolean = config("rev_comp", default = false)
+  var revCompBarcode: Boolean = config("rev_comp_barcode", default = false)
+  var revCompMappingBarcodes: Boolean = config("rev_comp_mapping_barcodes", default = false)
+  var revComp: Boolean = config("rev_comp", default = false)
   var q: Option[Int] = config("q")
-  var last_bad_quality_char: Option[String] = config("last_bad_quality_char")
-  var barcode_type: Option[String] = config("barcode_type")
-  var max_barcode_errors: Option[Double] = config("max_barcode_errors")
-  var phred_offset: Option[String] = config("phred_offset")
+  var lastBadQualityChar: Option[String] = config("last_bad_quality_char")
+  var barcodeType: Option[String] = config("barcode_type")
+  var maxBarcodeErrors: Option[Double] = config("max_barcode_errors")
+  var phredOffset: Option[String] = config("phred_offset")
 
   def outputSeqs = new File(outputDir, "seqs.fna")
 
@@ -52,25 +52,25 @@ class SplitLibrariesFastq(val root: Configurable) extends BiopetCommandLineFunct
     optional("-v", v) +
     optional("-m", m) +
     optional("-b", b) +
-    conditional(store_qual_scores, "--store_qual_scores") +
-    (sample_ids match {
+    conditional(storeQualScores, "--store_qual_scores") +
+    (sampleIds match {
       case l: List[_] if l.nonEmpty => optional("--sample_ids", l.mkString(","))
       case _                        => ""
     }) +
-    conditional(store_demultiplexed_fastq, "--store_demultiplexed_fastq") +
-    conditional(retain_unassigned_reads, "--retain_unassigned_reads") +
+    conditional(storeDemultiplexedFastq, "--store_demultiplexed_fastq") +
+    conditional(retainUnassignedReads, "--retain_unassigned_reads") +
     optional("-r", r) +
     optional("-p", p) +
     optional("-n", n) +
     optional("-s", s) +
-    conditional(rev_comp_barcode, "--rev_comp_barcode") +
-    conditional(rev_comp_mapping_barcodes, "--rev_comp_mapping_barcodes") +
-    conditional(rev_comp, "--rev_comp") +
+    conditional(revCompBarcode, "--rev_comp_barcode") +
+    conditional(revCompMappingBarcodes, "--rev_comp_mapping_barcodes") +
+    conditional(revComp, "--rev_comp") +
     optional("-q", q) +
-    optional("--last_bad_quality_char", last_bad_quality_char) +
-    optional("--barcode_type", barcode_type) +
-    optional("--max_barcode_errors", max_barcode_errors) +
-    optional("--phred_offset", phred_offset) +
+    optional("--last_bad_quality_char", lastBadQualityChar) +
+    optional("--barcode_type", barcodeType) +
+    optional("--max_barcode_errors", maxBarcodeErrors) +
+    optional("--phred_offset", phredOffset) +
     (input match {
       case l: List[_] if l.nonEmpty => required("-i", l.mkString(","))
       case _                        => ""
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaMarkdup.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaMarkdup.scala
index 2f89774db35e5f9d6f22518aa2ff3588f70d053f..80e999db78762dc44731ca0fb990c6598b074c62 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaMarkdup.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaMarkdup.scala
@@ -30,23 +30,23 @@ class SambambaMarkdup(val root: Configurable) extends Sambamba {
   @Output(doc = "Markdup output bam")
   var output: File = _
 
-  var remove_duplicates: Boolean = config("remove_duplicates", default = false)
+  var removeDuplicates: Boolean = config("remove_duplicates", default = false)
 
   // @doc: compression_level 6 is average, 0 = no compression, 9 = best
-  val compression_level: Option[Int] = config("compression_level", default = 6)
-  val hash_table_size: Option[Int] = config("hash-table-size", default = 262144)
-  val overflow_list_size: Option[Int] = config("overflow-list-size", default = 200000)
-  val io_buffer_size: Option[Int] = config("io-buffer-size", default = 128)
+  val compressionLevel: Option[Int] = config("compression_level", default = 6)
+  val hashTableSize: Option[Int] = config("hash-table-size", default = 262144)
+  val overflowListSize: Option[Int] = config("overflow-list-size", default = 200000)
+  val ioBufferSize: Option[Int] = config("io-buffer-size", default = 128)
 
   /** Returns command to execute */
   def cmdLine = required(executable) +
     required("markdup") +
-    conditional(remove_duplicates, "--remove-duplicates") +
+    conditional(removeDuplicates, "--remove-duplicates") +
     optional("-t", nCoresRequest) +
-    optional("-l", compression_level) +
-    optional("--hash-table-size=", hash_table_size, spaceSeparated = false) +
-    optional("--overflow-list-size=", overflow_list_size, spaceSeparated = false) +
-    optional("--io-buffer-size=", io_buffer_size, spaceSeparated = false) +
+    optional("-l", compressionLevel) +
+    optional("--hash-table-size=", hashTableSize, spaceSeparated = false) +
+    optional("--overflow-list-size=", overflowListSize, spaceSeparated = false) +
+    optional("--io-buffer-size=", ioBufferSize, spaceSeparated = false) +
     required(input) +
     required(output)
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaMerge.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaMerge.scala
index 83464fa4972e6f1aa3b9f74733ff8589985e91d8..4cfb1bc9925ec38c91a9eba8248112fbff145120 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaMerge.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaMerge.scala
@@ -31,13 +31,13 @@ class SambambaMerge(val root: Configurable) extends Sambamba {
   var output: File = _
 
   // @doc: compression_level 6 is average, 0 = no compression, 9 = best
-  val compression_level: Option[Int] = config("compression_level", default = 6)
+  val compressionLevel: Option[Int] = config("compression_level", default = 6)
 
   /** Returns command to execute */
   def cmdLine = required(executable) +
     required("merge") +
     optional("-t", nCoresRequest) +
-    optional("-l", compression_level) +
+    optional("-l", compressionLevel) +
     required(output) +
     repeat("", input)
 }
diff --git a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaView.scala b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaView.scala
index ca2470c6094192afef8da6de7b76b6ca1809c1e3..d5f9245ee3a8863c2aa6ac9469da7bab1691ecbe 100644
--- a/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaView.scala
+++ b/public/biopet-extensions/src/main/scala/nl/lumc/sasc/biopet/extensions/sambamba/SambambaView.scala
@@ -33,7 +33,7 @@ class SambambaView(val root: Configurable) extends Sambamba {
   var filter: Option[String] = _
   val format: Option[String] = config("format", default = "bam")
   val regions: Option[File] = config("regions")
-  val compression_level: Option[Int] = config("compression_level", default = 6)
+  val compressionLevel: Option[Int] = config("compression_level", default = 6)
 
   /** Returns command to execute */
   def cmdLine = required(executable) +
@@ -42,7 +42,7 @@ class SambambaView(val root: Configurable) extends Sambamba {
     optional("--nthreads", nCoresRequest) +
     optional("--format", format.get) +
     optional("--regions", regions) +
-    optional("--compression-level", compression_level) +
+    optional("--compression-level", compressionLevel) +
     required("--output-filename", output) +
     required(input)
 }
diff --git a/public/biopet-tools/src/main/scala/nl/lumc/sasc/biopet/tools/SeqStat.scala b/public/biopet-tools/src/main/scala/nl/lumc/sasc/biopet/tools/SeqStat.scala
index 27b17496f28a58fec370deb04eb226c2cedcfe37..fe59522374391b9ed5af74df61ed34d58db15339 100644
--- a/public/biopet-tools/src/main/scala/nl/lumc/sasc/biopet/tools/SeqStat.scala
+++ b/public/biopet-tools/src/main/scala/nl/lumc/sasc/biopet/tools/SeqStat.scala
@@ -84,10 +84,10 @@ object SeqStat extends ToolCommand {
    */
   def detectPhredEncoding(quals: mutable.ArrayBuffer[Long]): Unit = {
     // substract 1 on high value, because we start from index 0
-    val qual_low_boundery = quals.takeWhile(_ == 0).length
-    val qual_high_boundery = quals.length - 1
+    val qualLowBoundery = quals.takeWhile(_ == 0).length
+    val qualHighBoundery = quals.length - 1
 
-    (qual_low_boundery < 59, qual_high_boundery > 74) match {
+    (qualLowBoundery < 59, qualHighBoundery > 74) match {
       case (false, true) => phredEncoding = Solexa
       // TODO: check this later on
       // complex case, we cannot tell wheter this is a sanger or solexa
diff --git a/public/biopet-tools/src/main/scala/nl/lumc/sasc/biopet/tools/VepNormalizer.scala b/public/biopet-tools/src/main/scala/nl/lumc/sasc/biopet/tools/VepNormalizer.scala
index f9f0fe472686589f47c52b04d2c3e97a181a026e..50893046b949792ddccedf00752b34c632dc00ee 100644
--- a/public/biopet-tools/src/main/scala/nl/lumc/sasc/biopet/tools/VepNormalizer.scala
+++ b/public/biopet-tools/src/main/scala/nl/lumc/sasc/biopet/tools/VepNormalizer.scala
@@ -64,13 +64,13 @@ object VepNormalizer extends ToolCommand {
     versionCheck(header)
     logger.debug("VCF version OK")
     logger.debug("Parsing header")
-    val new_infos = parseCsq(header)
+    val newInfos = parseCsq(header)
     header.setWriteCommandLine(true)
     val writer = new AsyncVariantContextWriter(new VariantContextWriterBuilder().
       setOutputFile(output).setReferenceDictionary(header.getSequenceDictionary)
       build ())
 
-    for (info <- new_infos) {
+    for (info <- newInfos) {
       val tmpheaderline = new VCFInfoHeaderLine(info, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "A VEP annotation")
       header.addMetaDataLine(tmpheaderline)
     }
@@ -81,7 +81,7 @@ object VepNormalizer extends ToolCommand {
     writer.writeHeader(header)
     logger.debug("Wrote header to file")
 
-    normalize(reader, writer, new_infos, commandArgs.mode, commandArgs.removeCSQ)
+    normalize(reader, writer, newInfos, commandArgs.mode, commandArgs.removeCSQ)
     writer.close()
     logger.debug("Closed writer")
     reader.close()
diff --git a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/AnnotateVcfWithBedTest.scala b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/AnnotateVcfWithBedTest.scala
index f128a6b42e38b47b0518668a1916cd057a2ea159..65af6ae0e36c062196fff6306321deac5028281b 100644
--- a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/AnnotateVcfWithBedTest.scala
+++ b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/AnnotateVcfWithBedTest.scala
@@ -39,20 +39,20 @@ class AnnotateVcfWithBedTest extends TestNGSuite with MockitoSugar with Matchers
   val rand = new Random()
 
   @Test def testOutputTypeVcf() = {
-    val tmp_path = "/tmp/VcfFilter_" + rand.nextString(10) + ".vcf"
-    val arguments: Array[String] = Array("-I", vepped_path, "-o", tmp_path, "-B", bed, "-f", "testing")
+    val tmpPath = "/tmp/VcfFilter_" + rand.nextString(10) + ".vcf"
+    val arguments: Array[String] = Array("-I", vepped_path, "-o", tmpPath, "-B", bed, "-f", "testing")
     main(arguments)
   }
 
   @Test def testOutputTypeBcf() = {
-    val tmp_path = "/tmp/VcfFilter_" + rand.nextString(10) + ".bcf"
-    val arguments: Array[String] = Array("-I", vepped_path, "-o", tmp_path, "-B", bed, "-f", "testing")
+    val tmpPath = "/tmp/VcfFilter_" + rand.nextString(10) + ".bcf"
+    val arguments: Array[String] = Array("-I", vepped_path, "-o", tmpPath, "-B", bed, "-f", "testing")
     main(arguments)
   }
 
   @Test def testOutputTypeVcfGz() = {
-    val tmp_path = "/tmp/VcfFilter_" + rand.nextString(10) + ".vcf.gz"
-    val arguments: Array[String] = Array("-I", vepped_path, "-o", tmp_path, "-B", bed, "-f", "testing")
+    val tmpPath = "/tmp/VcfFilter_" + rand.nextString(10) + ".vcf.gz"
+    val arguments: Array[String] = Array("-I", vepped_path, "-o", tmpPath, "-B", bed, "-f", "testing")
     main(arguments)
   }
 
diff --git a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/BastyGenerateFastaTest.scala b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/BastyGenerateFastaTest.scala
index a88dc561a34fe1b25b39f69e34b25ed30a6f393d..f4455ca6ab7b11a3520a41bf75e9c98c096c9987 100644
--- a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/BastyGenerateFastaTest.scala
+++ b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/BastyGenerateFastaTest.scala
@@ -35,11 +35,11 @@ class BastyGenerateFastaTest extends TestNGSuite with MockitoSugar with Matchers
     Paths.get(getClass.getResource(p).toURI).toString
   }
 
-  val vepped_path = resourcePath("/VEP_oneline.vcf")
-  val vepped = new File(vepped_path)
-  val bam_path = resourcePath("/paired01.bam")
-  val chrQ_path = resourcePath("/chrQ.vcf.gz")
-  val chrQRef_path = resourcePath("/fake_chrQ.fa")
+  val veppedPath = resourcePath("/VEP_oneline.vcf")
+  val vepped = new File(veppedPath)
+  val bamPath = resourcePath("/paired01.bam")
+  val chrQPath = resourcePath("/chrQ.vcf.gz")
+  val chrQRefPath = resourcePath("/fake_chrQ.fa")
   val bam = new File(resourcePath("/paired01.bam"))
   val chrQ = new File(resourcePath("/chrQ.vcf.gz"))
   val chrQRef = new File(resourcePath("/fake_chrQ.fa"))
@@ -50,7 +50,7 @@ class BastyGenerateFastaTest extends TestNGSuite with MockitoSugar with Matchers
     val tmppath = tmp.getAbsolutePath
     tmp.deleteOnExit()
 
-    val arguments = Array("-V", chrQ_path, "--outputVariants", tmppath, "--sampleName", "Sample_101", "--reference", chrQRef_path, "--outputName", "test")
+    val arguments = Array("-V", chrQPath, "--outputVariants", tmppath, "--sampleName", "Sample_101", "--reference", chrQRefPath, "--outputName", "test")
     main(arguments)
   }
 
@@ -60,7 +60,7 @@ class BastyGenerateFastaTest extends TestNGSuite with MockitoSugar with Matchers
     val tmppath = tmp.getAbsolutePath
     tmp.deleteOnExit()
 
-    val arguments = Array("-V", chrQ_path, "--outputVariants", tmppath, "--bamFile", bam_path, "--sampleName", "Sample_101", "--reference", chrQRef_path, "--outputName", "test")
+    val arguments = Array("-V", chrQPath, "--outputVariants", tmppath, "--bamFile", bamPath, "--sampleName", "Sample_101", "--reference", chrQRefPath, "--outputName", "test")
     main(arguments)
   }
 
@@ -70,7 +70,7 @@ class BastyGenerateFastaTest extends TestNGSuite with MockitoSugar with Matchers
     val tmppath = tmp.getAbsolutePath
     tmp.deleteOnExit()
 
-    val arguments = Array("-V", chrQ_path, "--outputConsensus", tmppath, "--outputConsensusVariants", tmppath, "--bamFile", bam_path, "--sampleName", "Sample_101", "--reference", chrQRef_path, "--outputName", "test")
+    val arguments = Array("-V", chrQPath, "--outputConsensus", tmppath, "--outputConsensusVariants", tmppath, "--bamFile", bamPath, "--sampleName", "Sample_101", "--reference", chrQRefPath, "--outputName", "test")
     main(arguments)
   }
 
diff --git a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/CheckAllelesVcfInBamTest.scala b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/CheckAllelesVcfInBamTest.scala
index 1ee6f38d316e076084479edbbface36eba73dc92..7281e4deabcc76b479c3ba5ce38cb3ac487cb397 100644
--- a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/CheckAllelesVcfInBamTest.scala
+++ b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/CheckAllelesVcfInBamTest.scala
@@ -47,24 +47,24 @@ class CheckAllelesVcfInBamTest extends TestNGSuite with MockitoSugar with Matche
   @Test def testOutputTypeVcf() = {
     val tmp = File.createTempFile("CheckAllelesVcfInBam", ".vcf")
     tmp.deleteOnExit()
-    val tmp_path = tmp.getAbsolutePath
-    val arguments = Array("-I", vcf, "-b", bam, "-s", "sample01", "-o", tmp_path)
+    val tmpPath = tmp.getAbsolutePath
+    val arguments = Array("-I", vcf, "-b", bam, "-s", "sample01", "-o", tmpPath)
     main(arguments)
   }
 
   @Test def testOutputTypeVcfGz() = {
     val tmp = File.createTempFile("CheckAllelesVcfInBam", ".vcf.gz")
     tmp.deleteOnExit()
-    val tmp_path = tmp.getAbsolutePath
-    val arguments = Array("-I", vcf, "-b", bam, "-s", "sample01", "-o", tmp_path)
+    val tmpPath = tmp.getAbsolutePath
+    val arguments = Array("-I", vcf, "-b", bam, "-s", "sample01", "-o", tmpPath)
     main(arguments)
   }
 
   @Test def testOutputTypeBcf() = {
     val tmp = File.createTempFile("CheckAllelesVcfInBam", ".bcf")
     tmp.deleteOnExit()
-    val tmp_path = tmp.getAbsolutePath
-    val arguments = Array("-I", vcf, "-b", bam, "-s", "sample01", "-o", tmp_path)
+    val tmpPath = tmp.getAbsolutePath
+    val arguments = Array("-I", vcf, "-b", bam, "-s", "sample01", "-o", tmpPath)
     main(arguments)
   }
 
diff --git a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/GvcfToBedTest.scala b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/GvcfToBedTest.scala
index aac4cb6c7e0ddaa0f7e9479110758d95e6cb3ac6..411b632aafea2f0baa799ce076a8e9387f795c64 100644
--- a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/GvcfToBedTest.scala
+++ b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/GvcfToBedTest.scala
@@ -27,7 +27,7 @@ class GvcfToBedTest extends TestNGSuite with Matchers with MockitoSugar {
   val vepped = new File(resourcePath("/VEP_oneline.vcf"))
   val unvepped = new File(resourcePath("/unvepped.vcf"))
 
-  val vepped_path = resourcePath("/VEP_oneline.vcf")
+  val veppedPath = resourcePath("/VEP_oneline.vcf")
 
   @Test def testMinQuality = {
     val reader = new VCFFileReader(vepped, false)
@@ -64,23 +64,23 @@ class GvcfToBedTest extends TestNGSuite with Matchers with MockitoSugar {
   @Test
   def testGvcfToBedInvertedOutput = {
     val tmp = File.createTempFile("gvcf2bedtest", ".bed")
-    val tmp_inv = File.createTempFile("gvcf2bedtest", ".bed")
+    val tmpInv = File.createTempFile("gvcf2bedtest", ".bed")
     tmp.deleteOnExit()
-    tmp_inv.deleteOnExit()
+    tmpInv.deleteOnExit()
     val args: Array[String] = Array("-I", unvepped.getAbsolutePath, "-O", tmp.getAbsolutePath, "-S", "Sample_101",
-      "--minGenomeQuality", "99", "--invertedOutputBed", tmp_inv.getAbsolutePath)
+      "--minGenomeQuality", "99", "--invertedOutputBed", tmpInv.getAbsolutePath)
     main(args)
 
-    Source.fromFile(tmp_inv).getLines().size shouldBe 1
+    Source.fromFile(tmpInv).getLines().size shouldBe 1
 
     val tmp2 = File.createTempFile("gvcf2bedtest", ".bed")
-    val tmp2_inv = File.createTempFile("gvcf2bedtest", ".bed")
+    val tmp2Inv = File.createTempFile("gvcf2bedtest", ".bed")
     tmp2.deleteOnExit()
-    tmp2_inv.deleteOnExit()
+    tmp2Inv.deleteOnExit()
     val args2: Array[String] = Array("-I", unvepped.getAbsolutePath, "-O", tmp.getAbsolutePath, "-S", "Sample_102",
-      "--minGenomeQuality", "3", "--invertedOutputBed", tmp2_inv.getAbsolutePath)
+      "--minGenomeQuality", "3", "--invertedOutputBed", tmp2Inv.getAbsolutePath)
     main(args2)
 
-    Source.fromFile(tmp2_inv).getLines().size shouldBe 0
+    Source.fromFile(tmp2Inv).getLines().size shouldBe 0
   }
 }
diff --git a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/MergeAllelesTest.scala b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/MergeAllelesTest.scala
index b7ea9bb9c9b2d3e76b6d1c312106091a2684f9a2..538948557f4a17c55d70d40c04c1ef4f384fd94e 100644
--- a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/MergeAllelesTest.scala
+++ b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/MergeAllelesTest.scala
@@ -51,24 +51,24 @@ class MergeAllelesTest extends TestNGSuite with MockitoSugar with Matchers {
   @Test def testOutputTypeVcf() = {
     val tmp = File.createTempFile("MergeAlleles", ".vcf")
     tmp.deleteOnExit()
-    val tmp_path = tmp.getAbsolutePath
-    val arguments = Array("-I", vepped_path, "-o", tmp_path, "-R", reference)
+    val tmpPath = tmp.getAbsolutePath
+    val arguments = Array("-I", vepped_path, "-o", tmpPath, "-R", reference)
     main(arguments)
   }
 
   @Test def testOutputTypeVcfGz() = {
     val tmp = File.createTempFile("MergeAlleles", ".vcf.gz")
     tmp.deleteOnExit()
-    val tmp_path = tmp.getAbsolutePath
-    val arguments = Array("-I", vepped_path, "-o", tmp_path, "-R", reference)
+    val tmpPath = tmp.getAbsolutePath
+    val arguments = Array("-I", vepped_path, "-o", tmpPath, "-R", reference)
     main(arguments)
   }
 
   @Test def testOutputTypeBcf() = {
     val tmp = File.createTempFile("MergeAlleles", ".bcf")
     tmp.deleteOnExit()
-    val tmp_path = tmp.getAbsolutePath
-    val arguments = Array("-I", vepped_path, "-o", tmp_path, "-R", reference)
+    val tmpPath = tmp.getAbsolutePath
+    val arguments = Array("-I", vepped_path, "-o", tmpPath, "-R", reference)
     main(arguments)
   }
 }
diff --git a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/MpileupToVcfTest.scala b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/MpileupToVcfTest.scala
index 46e0ffa932eafe3ac20b8642cf52c3033dd5a733..1406cef299c2198ebcce4290dacf238c959ebe2b 100644
--- a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/MpileupToVcfTest.scala
+++ b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/MpileupToVcfTest.scala
@@ -81,14 +81,14 @@ class MpileupToVcfTest extends TestNGSuite with MockitoSugar with Matchers {
 
     for (record <- vcfReader) {
       val alleles = record.getAlleles.toSet
-      var ref_alleles = alleles -- record.getAlternateAlleles.toSet
+      var refAlleles = alleles -- record.getAlternateAlleles.toSet
 
-      ref_alleles.size should be >= 1
+      refAlleles.size should be >= 1
 
       val realRef = Allele.create(sequenceFile.getSubsequenceAt(record.getContig,
         record.getStart, record.getEnd).getBases, true)
 
-      for (ref <- ref_alleles) {
+      for (ref <- refAlleles) {
         record.extraStrictValidation(ref, realRef, Set(""))
       }
     }
diff --git a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/SummaryToTsvTest.scala b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/SummaryToTsvTest.scala
index a53c4dd31abbaa326ff6c5f251c0ee3eeb3ac1c6..e791d8093876fc36057823b862d7a09cc0d8dfd1 100644
--- a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/SummaryToTsvTest.scala
+++ b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/SummaryToTsvTest.scala
@@ -77,13 +77,13 @@ class SummaryToTsvTest extends TestNGSuite with MockitoSugar with Matchers {
 
     val line = values.head._2.keys.map(x => createLine(paths, values, x)).head
     line should equal("value\t")
-    val sample_values = fetchValues(summary, paths, true, false)
-    val sample_line = sample_values.head._2.keys.map(x => createLine(paths, sample_values, x)).head
-    sample_line should equal("016\t")
+    val sampleValues = fetchValues(summary, paths, true, false)
+    val sampleLine = sampleValues.head._2.keys.map(x => createLine(paths, sampleValues, x)).head
+    sampleLine should equal("016\t")
 
-    val lib_values = fetchValues(summary, paths, false, true)
-    val lib_line = lib_values.head._2.keys.map(x => createLine(paths, lib_values, x)).head
-    lib_line should equal("016-L001\tfalse")
+    val libValues = fetchValues(summary, paths, false, true)
+    val libLine = libValues.head._2.keys.map(x => createLine(paths, libValues, x)).head
+    libLine should equal("016-L001\tfalse")
   }
 
 }
diff --git a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VcfToTsvTest.scala b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VcfToTsvTest.scala
index ee327392cf6f073088a5bea9b2d449593b7e3fd3..76379526a63c486d362762245487e6b456e4309e 100644
--- a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VcfToTsvTest.scala
+++ b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VcfToTsvTest.scala
@@ -44,31 +44,31 @@ class VcfToTsvTest extends TestNGSuite with MockitoSugar with Matchers {
   @Test def testAllFields() = {
     val tmp = File.createTempFile("VcfToTsv", ".tsv")
     tmp.deleteOnExit()
-    val tmp_path = tmp.getAbsolutePath
-    val arguments = Array("-I", unvepped, "-o", tmp_path, "--all_info")
+    val tmpPath = tmp.getAbsolutePath
+    val arguments = Array("-I", unvepped, "-o", tmpPath, "--all_info")
     main(arguments)
   }
 
   @Test def testSpecificField() = {
     val tmp = File.createTempFile("VcfToTsv", ".tsv")
     tmp.deleteOnExit()
-    val tmp_path = tmp.getAbsolutePath
-    val arguments = Array("-I", vepped, "-o", tmp_path, "-i", "CSQ")
+    val tmpPath = tmp.getAbsolutePath
+    val arguments = Array("-I", vepped, "-o", tmpPath, "-i", "CSQ")
     main(arguments)
   }
 
   @Test def testNewSeparators() = {
     val tmp = File.createTempFile("VcfToTsv", ".tsv")
     tmp.deleteOnExit()
-    val tmp_path = tmp.getAbsolutePath
-    val arguments = Array("-I", vepped, "-o", tmp_path, "--all_info", "--separator", ",", "--list_separator", "|")
+    val tmpPath = tmp.getAbsolutePath
+    val arguments = Array("-I", vepped, "-o", tmpPath, "--all_info", "--separator", ",", "--list_separator", "|")
     main(arguments)
   }
 
   @Test(expectedExceptions = Array(classOf[IllegalArgumentException]))
   def testIdenticalSeparators() = {
-    val tmp_path = "/tmp/VcfToTsv_" + rand.nextString(10) + ".tsv"
-    val arguments = Array("-I", vepped, "-o", tmp_path, "--all_info", "--separator", ",")
+    val tmpPath = "/tmp/VcfToTsv_" + rand.nextString(10) + ".tsv"
+    val arguments = Array("-I", vepped, "-o", tmpPath, "--all_info", "--separator", ",")
     main(arguments)
   }
 
diff --git a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VcfWithVcfTest.scala b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VcfWithVcfTest.scala
index a6a70881012480a8e92b9aac1c689e4456f9f8c7..2f1814210f0ccc760da2235969910e5a335524f7 100644
--- a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VcfWithVcfTest.scala
+++ b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VcfWithVcfTest.scala
@@ -105,7 +105,7 @@ class VcfWithVcfTest extends TestNGSuite with MockitoSugar with Matchers {
 
   @Test
   def testFieldMap = {
-    val unvep_record = new VCFFileReader(new File(unveppedPath)).iterator().next()
+    val unvepRecord = new VCFFileReader(new File(unveppedPath)).iterator().next()
 
     var fields = List(new Fields("FG", "FG"))
     fields :::= List(new Fields("FD", "FD"))
@@ -133,7 +133,7 @@ class VcfWithVcfTest extends TestNGSuite with MockitoSugar with Matchers {
     fields :::= List(new Fields("VQSLOD", "VQSLOD"))
     fields :::= List(new Fields("culprit", "culprit"))
 
-    val fieldMap = createFieldMap(fields, List(unvep_record))
+    val fieldMap = createFieldMap(fields, List(unvepRecord))
 
     fieldMap("FG") shouldBe List("intron")
     fieldMap("FD") shouldBe List("unknown")
@@ -163,26 +163,26 @@ class VcfWithVcfTest extends TestNGSuite with MockitoSugar with Matchers {
   }
 
   @Test def testGetSecondaryRecords = {
-    val unvep_record = new VCFFileReader(new File(unveppedPath)).iterator().next()
-    val vep_reader = new VCFFileReader(new File(veppedPath))
-    val vep_record = vep_reader.iterator().next()
+    val unvepRecord = new VCFFileReader(new File(unveppedPath)).iterator().next()
+    val vepReader = new VCFFileReader(new File(veppedPath))
+    val vepRecord = vepReader.iterator().next()
 
-    val secRec = getSecondaryRecords(vep_reader, unvep_record, false)
+    val secRec = getSecondaryRecords(vepReader, unvepRecord, false)
 
-    secRec.foreach(x => identicalVariantContext(x, vep_record) shouldBe true)
+    secRec.foreach(x => identicalVariantContext(x, vepRecord) shouldBe true)
   }
 
   @Test def testCreateRecord = {
-    val unvep_record = new VCFFileReader(new File(unveppedPath)).iterator().next()
-    val vep_reader = new VCFFileReader(new File(veppedPath))
-    val header = vep_reader.getFileHeader
-    val vep_record = vep_reader.iterator().next()
+    val unvepRecord = new VCFFileReader(new File(unveppedPath)).iterator().next()
+    val vepReader = new VCFFileReader(new File(veppedPath))
+    val header = vepReader.getFileHeader
+    val vepRecord = vepReader.iterator().next()
 
-    val secRec = getSecondaryRecords(vep_reader, unvep_record, false)
+    val secRec = getSecondaryRecords(vepReader, unvepRecord, false)
 
     val fieldMap = createFieldMap(List(new Fields("CSQ", "CSQ")), secRec)
-    val created_record = createRecord(fieldMap, unvep_record, List(new Fields("CSQ", "CSQ")), header)
-    identicalVariantContext(created_record, vep_record) shouldBe true
+    val createdRecord = createRecord(fieldMap, unvepRecord, List(new Fields("CSQ", "CSQ")), header)
+    identicalVariantContext(createdRecord, vepRecord) shouldBe true
   }
 
 }
diff --git a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VepNormalizerTest.scala b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VepNormalizerTest.scala
index 53aeddfaf1d6ba87f9e89ac29b31edff8fc5e01b..d84db877f68b41bf0861d4e03cd96cb25be9c767 100644
--- a/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VepNormalizerTest.scala
+++ b/public/biopet-tools/src/test/scala/nl/lumc/sasc/biopet/tools/VepNormalizerTest.scala
@@ -42,49 +42,49 @@ class VepNormalizerTest extends TestNGSuite with MockitoSugar with Matchers {
   val vepped = new File(resourcePath("/VEP_oneline.vcf"))
   val unvepped = new File(resourcePath("/unvepped.vcf"))
 
-  val vepped_path = resourcePath("/VEP_oneline.vcf")
+  val veppedPath = resourcePath("/VEP_oneline.vcf")
 
   val rand = new Random()
 
   @Test def testGzOutputExplode(): Unit = {
     val tmpFile = File.createTempFile("VepNormalizer_", ".vcf.gz")
     tmpFile.deleteOnExit()
-    val arguments: Array[String] = Array("-I", vepped_path, "-O", tmpFile.getAbsolutePath, "-m", "explode")
+    val arguments: Array[String] = Array("-I", veppedPath, "-O", tmpFile.getAbsolutePath, "-m", "explode")
     main(arguments)
   }
 
   @Test def testVcfOutputExplode(): Unit = {
     val tmpFile = File.createTempFile("VepNormalizer_", ".vcf")
     tmpFile.deleteOnExit()
-    val arguments: Array[String] = Array("-I", vepped_path, "-O", tmpFile.getAbsolutePath, "-m", "explode")
+    val arguments: Array[String] = Array("-I", veppedPath, "-O", tmpFile.getAbsolutePath, "-m", "explode")
     main(arguments)
   }
 
   @Test def testBcfOutputExplode(): Unit = {
     val tmpFile = File.createTempFile("VepNormalizer_", ".bcf")
     tmpFile.deleteOnExit()
-    val arguments: Array[String] = Array("-I", vepped_path, "-O", tmpFile.getAbsolutePath, "-m", "explode")
+    val arguments: Array[String] = Array("-I", veppedPath, "-O", tmpFile.getAbsolutePath, "-m", "explode")
     main(arguments)
   }
 
   @Test def testGzOutputStandard(): Unit = {
     val tmpFile = File.createTempFile("VepNormalizer_", ".vcf.gz")
     tmpFile.deleteOnExit()
-    val arguments: Array[String] = Array("-I", vepped_path, "-O", tmpFile.getAbsolutePath, "-m", "standard")
+    val arguments: Array[String] = Array("-I", veppedPath, "-O", tmpFile.getAbsolutePath, "-m", "standard")
     main(arguments)
   }
 
   @Test def testVcfOutputStandard(): Unit = {
     val tmpFile = File.createTempFile("VepNormalizer_", ".vcf")
     tmpFile.deleteOnExit()
-    val arguments: Array[String] = Array("-I", vepped_path, "-O", tmpFile.getAbsolutePath, "-m", "standard")
+    val arguments: Array[String] = Array("-I", veppedPath, "-O", tmpFile.getAbsolutePath, "-m", "standard")
     main(arguments)
   }
 
   @Test def testBcfOutputStandard(): Unit = {
     val tmpFile = File.createTempFile("VepNormalizer_", ".bcf")
     tmpFile.deleteOnExit()
-    val arguments: Array[String] = Array("-I", vepped_path, "-O", tmpFile.getAbsolutePath, "-m", "standard")
+    val arguments: Array[String] = Array("-I", veppedPath, "-O", tmpFile.getAbsolutePath, "-m", "standard")
     main(arguments)
   }
 
@@ -97,22 +97,22 @@ class VepNormalizerTest extends TestNGSuite with MockitoSugar with Matchers {
   @Test def testExplodeVEPLength() = {
     val reader = new VCFFileReader(vepped, false)
     val header = reader.getFileHeader
-    val new_infos = parseCsq(header)
-    explodeTranscripts(reader.iterator().next(), new_infos, removeCsq = true).length should be(11)
+    val newInfos = parseCsq(header)
+    explodeTranscripts(reader.iterator().next(), newInfos, removeCsq = true).length should be(11)
   }
 
   @Test def testStandardVEPLength() = {
     val reader = new VCFFileReader(vepped, false)
     val header = reader.getFileHeader
-    val new_infos = parseCsq(header)
-    Array(standardTranscripts(reader.iterator().next(), new_infos, removeCsq = true)).length should be(1)
+    val newInfos = parseCsq(header)
+    Array(standardTranscripts(reader.iterator().next(), newInfos, removeCsq = true)).length should be(1)
   }
 
   @Test def testStandardVEPAttributeLength() = {
     val reader = new VCFFileReader(vepped, false)
     val header = reader.getFileHeader
-    val new_infos = parseCsq(header)
-    val record = standardTranscripts(reader.iterator().next(), new_infos, removeCsq = true)
+    val newInfos = parseCsq(header)
+    val record = standardTranscripts(reader.iterator().next(), newInfos, removeCsq = true)
     def checkItems(items: Array[String]) = {
       items.foreach { check }
     }
diff --git a/public/flexiprep/src/main/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/Flexiprep.scala b/public/flexiprep/src/main/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/Flexiprep.scala
index 83bde757cc5a0f64a9b9636eae8050dee7668bf8..5dd4dd9aaadd9da44bce94200f9ea49d594cbf69 100644
--- a/public/flexiprep/src/main/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/Flexiprep.scala
+++ b/public/flexiprep/src/main/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/Flexiprep.scala
@@ -28,7 +28,7 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
   def this() = this(null)
 
   @Input(doc = "R1 fastq file (gzipped allowed)", shortName = "R1", required = true)
-  var input_R1: File = _
+  var inputR1: File = _
 
   @Input(doc = "R2 fastq file (gzipped allowed)", shortName = "R2", required = false)
   var input_R2: Option[File] = None
@@ -47,7 +47,7 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
 
   /** Returns files to store in summary */
   def summaryFiles: Map[String, File] = {
-    Map("input_R1" -> input_R1, "output_R1" -> fastqR1Qc) ++
+    Map("input_R1" -> inputR1, "output_R1" -> fastqR1Qc) ++
       (if (paired) Map("input_R2" -> input_R2.get, "output_R2" -> fastqR2Qc.get) else Map())
   }
 
@@ -55,13 +55,13 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
   def summarySettings = Map("skip_trim" -> skipTrim, "skip_clip" -> skipClip, "paired" -> paired)
 
   var paired: Boolean = input_R2.isDefined
-  var R1_name: String = _
-  var R2_name: String = _
+  var R1Name: String = _
+  var R2Name: String = _
 
-  var fastqc_R1: Fastqc = _
-  var fastqc_R2: Fastqc = _
-  var fastqc_R1_after: Fastqc = _
-  var fastqc_R2_after: Fastqc = _
+  var fastqcR1: Fastqc = _
+  var fastqcR2: Fastqc = _
+  var fastqcR1After: Fastqc = _
+  var fastqcR2After: Fastqc = _
 
   override def reportClass = {
     val flexiprepReport = new FlexiprepReport(this)
@@ -76,19 +76,19 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
   /** Function that's need to be executed before the script is accessed */
   def init() {
     require(outputDir != null, "Missing output directory on flexiprep module")
-    require(input_R1 != null, "Missing input R1 on flexiprep module")
+    require(inputR1 != null, "Missing input R1 on flexiprep module")
     require(sampleId != null, "Missing sample ID on flexiprep module")
     require(libId != null, "Missing library ID on flexiprep module")
 
     paired = input_R2.isDefined
 
-    inputFiles :+= new InputFile(input_R1)
+    inputFiles :+= new InputFile(inputR1)
     input_R2.foreach(inputFiles :+= new InputFile(_))
 
-    R1_name = getUncompressedFileName(input_R1)
+    R1Name = getUncompressedFileName(inputR1)
     input_R2.foreach { fileR2 =>
       paired = true
-      R2_name = getUncompressedFileName(fileR2)
+      R2Name = getUncompressedFileName(fileR2)
     }
   }
 
@@ -96,26 +96,26 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
   def biopetScript() {
     runInitialJobs()
 
-    if (paired) runTrimClip(input_R1, input_R2, outputDir)
-    else runTrimClip(input_R1, outputDir)
+    if (paired) runTrimClip(inputR1, input_R2, outputDir)
+    else runTrimClip(inputR1, outputDir)
 
-    val R1_files = for ((k, v) <- outputFiles if k.endsWith("output_R1")) yield v
-    val R2_files = for ((k, v) <- outputFiles if k.endsWith("output_R2")) yield v
-    runFinalize(R1_files.toList, R2_files.toList)
+    val R1Files = for ((k, v) <- outputFiles if k.endsWith("output_R1")) yield v
+    val R2Files = for ((k, v) <- outputFiles if k.endsWith("output_R2")) yield v
+    runFinalize(R1Files.toList, R2Files.toList)
   }
 
   /** Add init non chunkable jobs */
   def runInitialJobs() {
-    outputFiles += ("fastq_input_R1" -> input_R1)
+    outputFiles += ("fastq_input_R1" -> inputR1)
     if (paired) outputFiles += ("fastq_input_R2" -> input_R2.get)
 
-    fastqc_R1 = Fastqc(this, input_R1, new File(outputDir, R1_name + ".fastqc/"))
-    add(fastqc_R1)
-    addSummarizable(fastqc_R1, "fastqc_R1")
-    outputFiles += ("fastqc_R1" -> fastqc_R1.output)
+    fastqcR1 = Fastqc(this, inputR1, new File(outputDir, R1Name + ".fastqc/"))
+    add(fastqcR1)
+    addSummarizable(fastqcR1, "fastqc_R1")
+    outputFiles += ("fastqc_R1" -> fastqcR1.output)
 
     val validateFastq = new ValidateFastq(this)
-    validateFastq.r1Fastq = input_R1
+    validateFastq.r1Fastq = inputR1
     validateFastq.r2Fastq = input_R2
     validateFastq.jobOutputFile = new File(outputDir, ".validate_fastq.log.out")
     add(validateFastq)
@@ -128,22 +128,22 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
     }
 
     if (paired) {
-      fastqc_R2 = Fastqc(this, input_R2.get, new File(outputDir, R2_name + ".fastqc/"))
-      add(fastqc_R2)
-      addSummarizable(fastqc_R2, "fastqc_R2")
-      outputFiles += ("fastqc_R2" -> fastqc_R2.output)
+      fastqcR2 = Fastqc(this, input_R2.get, new File(outputDir, R2Name + ".fastqc/"))
+      add(fastqcR2)
+      addSummarizable(fastqcR2, "fastqc_R2")
+      outputFiles += ("fastqc_R2" -> fastqcR2.output)
     }
 
-    val seqstat_R1 = SeqStat(this, input_R1, outputDir)
-    seqstat_R1.isIntermediate = true
-    add(seqstat_R1)
-    addSummarizable(seqstat_R1, "seqstat_R1")
+    val seqstatR1 = SeqStat(this, inputR1, outputDir)
+    seqstatR1.isIntermediate = true
+    add(seqstatR1)
+    addSummarizable(seqstatR1, "seqstat_R1")
 
     if (paired) {
-      val seqstat_R2 = SeqStat(this, input_R2.get, outputDir)
-      seqstat_R2.isIntermediate = true
-      add(seqstat_R2)
-      addSummarizable(seqstat_R2, "seqstat_R2")
+      val seqstatR2 = SeqStat(this, input_R2.get, outputDir)
+      seqstatR2.isIntermediate = true
+      add(seqstatR2)
+      addSummarizable(seqstatR2, "seqstat_R2")
     }
   }
 
@@ -176,17 +176,17 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
     var R1 = R1_in
     var R2 = R2_in
 
-    val qcCmdR1 = new QcCommand(this, fastqc_R1)
+    val qcCmdR1 = new QcCommand(this, fastqcR1)
     qcCmdR1.input = R1_in
     qcCmdR1.read = "R1"
     qcCmdR1.output = if (paired) new File(outDir, fastqR1Qc.getName.stripSuffix(".gz"))
     else fastqR1Qc
-    qcCmdR1.deps :+= fastqc_R1.output
+    qcCmdR1.deps :+= fastqcR1.output
     qcCmdR1.isIntermediate = paired || !keepQcFastqFiles
     addSummarizable(qcCmdR1, "qc_command_R1")
 
     if (paired) {
-      val qcCmdR2 = new QcCommand(this, fastqc_R2)
+      val qcCmdR2 = new QcCommand(this, fastqcR2)
       qcCmdR2.input = R2_in.get
       qcCmdR2.output = new File(outDir, fastqR2Qc.get.getName.stripSuffix(".gz"))
       qcCmdR2.read = "R2"
@@ -222,8 +222,8 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
         }
       }
 
-      pipe.deps ::= fastqc_R1.output
-      pipe.deps ::= fastqc_R2.output
+      pipe.deps ::= fastqcR1.output
+      pipe.deps ::= fastqcR2.output
       pipe.isIntermediate = !keepQcFastqFiles
       add(pipe)
 
@@ -236,14 +236,14 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
       R1 = qcCmdR1.output
     }
 
-    val seqstat_R1_after = SeqStat(this, R1, outDir)
-    add(seqstat_R1_after)
-    addSummarizable(seqstat_R1_after, "seqstat_R1_qc")
+    val seqstatR1After = SeqStat(this, R1, outDir)
+    add(seqstatR1After)
+    addSummarizable(seqstatR1After, "seqstat_R1_qc")
 
     if (paired) {
-      val seqstat_R2_after = SeqStat(this, R2.get, outDir)
-      add(seqstat_R2_after)
-      addSummarizable(seqstat_R2_after, "seqstat_R2_qc")
+      val seqstatR2After = SeqStat(this, R2.get, outDir)
+      add(seqstatR2After)
+      addSummarizable(seqstatR2After, "seqstat_R2_qc")
     }
 
     outputFiles += (chunk + "output_R1" -> R1)
@@ -283,14 +283,14 @@ class Flexiprep(val root: Configurable) extends QScript with SummaryQScript with
     outputFiles += ("output_R1_gzip" -> fastqR1Qc)
     if (paired) outputFiles += ("output_R2_gzip" -> fastqR2Qc.get)
 
-    fastqc_R1_after = Fastqc(this, fastqR1Qc, new File(outputDir, R1_name + ".qc.fastqc/"))
-    add(fastqc_R1_after)
-    addSummarizable(fastqc_R1_after, "fastqc_R1_qc")
+    fastqcR1After = Fastqc(this, fastqR1Qc, new File(outputDir, R1Name + ".qc.fastqc/"))
+    add(fastqcR1After)
+    addSummarizable(fastqcR1After, "fastqc_R1_qc")
 
     if (paired) {
-      fastqc_R2_after = Fastqc(this, fastqR2Qc.get, new File(outputDir, R2_name + ".qc.fastqc/"))
-      add(fastqc_R2_after)
-      addSummarizable(fastqc_R2_after, "fastqc_R2_qc")
+      fastqcR2After = Fastqc(this, fastqR2Qc.get, new File(outputDir, R2Name + ".qc.fastqc/"))
+      add(fastqcR2After)
+      addSummarizable(fastqcR2After, "fastqc_R2_qc")
     }
 
     addSummaryJobs()
diff --git a/public/flexiprep/src/main/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/QcCommand.scala b/public/flexiprep/src/main/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/QcCommand.scala
index 2b05933820c2b985f733ac5c8df0c44e8163ed55..5d92bf92b5aa8fe4b3c98a2dbc71efa76c5a2588 100644
--- a/public/flexiprep/src/main/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/QcCommand.scala
+++ b/public/flexiprep/src/main/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/QcCommand.scala
@@ -115,18 +115,18 @@ class QcCommand(val root: Configurable, val fastqc: Fastqc) extends BiopetComman
 
     trim = if (!flexiprep.skipTrim) {
       val sickle = new Sickle(root)
-      sickle.output_stats = new File(flexiprep.outputDir, s"${flexiprep.sampleId.getOrElse("x")}-${flexiprep.libId.getOrElse("x")}.$read.trim.stats")
-      sickle.input_R1 = clip match {
+      sickle.outputStats = new File(flexiprep.outputDir, s"${flexiprep.sampleId.getOrElse("x")}-${flexiprep.libId.getOrElse("x")}.$read.trim.stats")
+      sickle.inputR1 = clip match {
         case Some(c) => c.fastqOutput
         case _       => seqtk.output
       }
-      sickle.output_R1 = new File(output.getParentFile, input.getName + ".sickle.fq")
+      sickle.outputR1 = new File(output.getParentFile, input.getName + ".sickle.fq")
       addPipeJob(sickle)
       Some(sickle)
     } else None
 
     val outputFile = (clip, trim) match {
-      case (_, Some(t)) => t.output_R1
+      case (_, Some(t)) => t.outputR1
       case (Some(c), _) => c.fastqOutput
       case _            => seqtk.output
     }
diff --git a/public/flexiprep/src/test/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/FlexiprepTest.scala b/public/flexiprep/src/test/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/FlexiprepTest.scala
index 891d32ae3d6681d2c16beb0a19acb6d45110ec4c..f4b6718944b7283d91d7ec06ec0f4317acdcc027 100644
--- a/public/flexiprep/src/test/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/FlexiprepTest.scala
+++ b/public/flexiprep/src/test/scala/nl/lumc/sasc/biopet/pipelines/flexiprep/FlexiprepTest.scala
@@ -70,7 +70,7 @@ class FlexiprepTest extends TestNGSuite with Matchers {
     ), Map(FlexiprepTest.executables.toSeq: _*))
     val flexiprep: Flexiprep = initPipeline(map)
 
-    flexiprep.input_R1 = (if (zipped) FlexiprepTest.r1Zipped else FlexiprepTest.r1)
+    flexiprep.inputR1 = (if (zipped) FlexiprepTest.r1Zipped else FlexiprepTest.r1)
     if (paired) flexiprep.input_R2 = Some((if (zipped) FlexiprepTest.r2Zipped else FlexiprepTest.r2))
     flexiprep.sampleId = Some("1")
     flexiprep.libId = Some("1")
diff --git a/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/Gears.scala b/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/Gears.scala
index 37c6f110acab86db6ffb243037e7b5a1d7c22aed..d3c94062ca525dc6e80db4d5dd59edcb77de49a8 100644
--- a/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/Gears.scala
+++ b/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/Gears.scala
@@ -97,7 +97,7 @@ class Gears(val root: Configurable) extends QScript with MultiSampleQScript { qs
       lazy val flexiprep = new Flexiprep(qscript)
       flexiprep.sampleId = Some(sampleId)
       flexiprep.libId = Some(libId)
-      flexiprep.input_R1 = config("R1")
+      flexiprep.inputR1 = config("R1")
       flexiprep.input_R2 = config("R2")
       flexiprep.outputDir = new File(libDir, "flexiprep")
 
@@ -108,7 +108,7 @@ class Gears(val root: Configurable) extends QScript with MultiSampleQScript { qs
 
       /** Function that add library jobs */
       protected def addJobs(): Unit = {
-        inputFiles :+= InputFile(flexiprep.input_R1, config("R1_md5"))
+        inputFiles :+= InputFile(flexiprep.inputR1, config("R1_md5"))
         flexiprep.input_R2.foreach(inputFiles :+= InputFile(_, config("R2_md5")))
         add(flexiprep)
 
diff --git a/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsKraken.scala b/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsKraken.scala
index 3e1a7b48d88360b8b473929a746c0a778265daa3..e21645fba0c02672b050eb90f55e2a3bf9d8df30 100644
--- a/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsKraken.scala
+++ b/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsKraken.scala
@@ -58,18 +58,18 @@ class GearsKraken(val root: Configurable) extends QScript with SummaryQScript wi
 
     krakenAnalysis.paired = fastqR2.isDefined
 
-    krakenAnalysis.classified_out = Some(new File(outputDir, s"$outputName.krkn.classified.fastq"))
-    krakenAnalysis.unclassified_out = Some(new File(outputDir, s"$outputName.krkn.unclassified.fastq"))
+    krakenAnalysis.classifiedOut = Some(new File(outputDir, s"$outputName.krkn.classified.fastq"))
+    krakenAnalysis.unclassifiedOut = Some(new File(outputDir, s"$outputName.krkn.unclassified.fastq"))
     add(krakenAnalysis)
 
     outputFiles += ("kraken_output_raw" -> krakenAnalysis.output)
-    outputFiles += ("kraken_classified_out" -> krakenAnalysis.classified_out.getOrElse(""))
-    outputFiles += ("kraken_unclassified_out" -> krakenAnalysis.unclassified_out.getOrElse(""))
+    outputFiles += ("kraken_classified_out" -> krakenAnalysis.classifiedOut.getOrElse(""))
+    outputFiles += ("kraken_unclassified_out" -> krakenAnalysis.unclassifiedOut.getOrElse(""))
 
     // create kraken summary file
     val krakenReport = new KrakenReport(this)
     krakenReport.input = krakenAnalysis.output
-    krakenReport.show_zeros = true
+    krakenReport.showZeros = true
     krakenReport.output = new File(outputDir, s"$outputName.krkn.full")
     add(krakenReport)
 
diff --git a/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsQiimeClosed.scala b/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsQiimeClosed.scala
index 7fa102d1111b64967c2b5a71088729feed06c804..4076479bad630dafde806009de1cb31fa3bf015b 100644
--- a/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsQiimeClosed.scala
+++ b/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsQiimeClosed.scala
@@ -42,7 +42,7 @@ class GearsQiimeClosed(val root: Configurable) extends QScript with SummaryQScri
     val splitLib = new SplitLibrariesFastq(this)
     splitLib.input :+= fastqInput
     splitLib.outputDir = new File(outputDir, "split_libraries_fastq")
-    sampleId.foreach(splitLib.sample_ids :+= _)
+    sampleId.foreach(splitLib.sampleIds :+= _)
     add(splitLib)
 
     val closedReference = new PickClosedReferenceOtus(this)
diff --git a/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsQiimeRtax.scala b/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsQiimeRtax.scala
index f50a38a8652c4f14f26a62eb3f4a55f6a0cce9f9..d893d071f82e6fbb5ef29916b0209c4dacd6229a 100644
--- a/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsQiimeRtax.scala
+++ b/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsQiimeRtax.scala
@@ -45,14 +45,14 @@ class GearsQiimeRtax(val root: Configurable) extends QScript with BiopetQScript
     val slfR1 = new SplitLibrariesFastq(this)
     slfR1.input :+= fastqR1
     slfR1.outputDir = new File(outputDir, "split_libraries_fastq_R1")
-    sampleId.foreach(slfR1.sample_ids :+= _)
+    sampleId.foreach(slfR1.sampleIds :+= _)
     add(slfR1)
 
     lazy val slfR2 = fastqR2.map { file =>
       val j = new SplitLibrariesFastq(this)
       j.input :+= file
       j.outputDir = new File(outputDir, "split_libraries_fastq_R2")
-      sampleId.foreach(j.sample_ids :+= _)
+      sampleId.foreach(j.sampleIds :+= _)
       add(j)
       j
     }
@@ -75,8 +75,8 @@ class GearsQiimeRtax(val root: Configurable) extends QScript with BiopetQScript
     assignTaxonomy.outputDir = new File(outputDir, "assign_taxonomy")
     assignTaxonomy.jobOutputFile = new File(assignTaxonomy.outputDir, ".assign_taxonomy.out")
     assignTaxonomy.inputFasta = pickRepSet.outputFasta.get
-    assignTaxonomy.read_1_seqs_fp = Some(slfR1.outputSeqs)
-    assignTaxonomy.read_2_seqs_fp = slfR2.map(_.outputSeqs)
+    assignTaxonomy.read1SeqsFp = Some(slfR1.outputSeqs)
+    assignTaxonomy.read2SeqsFp = slfR2.map(_.outputSeqs)
     add(assignTaxonomy)
   }
 }
diff --git a/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsSingle.scala b/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsSingle.scala
index abe9edf5a78e486caea88625c7af12f878483783..972307ed7bfd6522a6f2aeab627b640f10936a93 100644
--- a/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsSingle.scala
+++ b/public/gears/src/main/scala/nl/lumc/sasc/biopet/pipelines/gears/GearsSingle.scala
@@ -79,7 +79,7 @@ class GearsSingle(val root: Configurable) extends QScript with SummaryQScript wi
   protected def executeFlexiprep(r1: File, r2: Option[File]): (File, Option[File]) = {
     if (!skipFlexiprep) {
       val flexiprep = new Flexiprep(this)
-      flexiprep.input_R1 = r1
+      flexiprep.inputR1 = r1
       flexiprep.input_R2 = r2
       flexiprep.outputDir = new File(outputDir, "flexiprep")
       add(flexiprep)
diff --git a/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/Gentrap.scala b/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/Gentrap.scala
index adee53966eac02109d44bf402dea34d83a5a55d4..37f3cf7a4894f58fe9dabef41811c388898b6267 100644
--- a/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/Gentrap.scala
+++ b/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/Gentrap.scala
@@ -215,10 +215,10 @@ class Gentrap(val root: Configurable) extends QScript
     } else bamFile
 
     /** Whether all libraries are paired or not */
-    def allPaired: Boolean = libraries.values.forall(_.mapping.forall(_.input_R2.isDefined))
+    def allPaired: Boolean = libraries.values.forall(_.mapping.forall(_.inputR2.isDefined))
 
     /** Whether all libraries are single or not */
-    def allSingle: Boolean = libraries.values.forall(_.mapping.forall(_.input_R2.isEmpty))
+    def allSingle: Boolean = libraries.values.forall(_.mapping.forall(_.inputR2.isEmpty))
 
     /** Adds all jobs for the sample */
     override def addJobs(): Unit = {
diff --git a/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/measures/CufflinksGuided.scala b/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/measures/CufflinksGuided.scala
index 30bdcb5893227c0c53e052317ac8984b088c4610..c3af9880ea3dd5c90e79b36647886738dc06f677 100644
--- a/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/measures/CufflinksGuided.scala
+++ b/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/measures/CufflinksGuided.scala
@@ -10,7 +10,7 @@ import org.broadinstitute.gatk.queue.QScript
 class CufflinksGuided(val root: Configurable) extends QScript with CufflinksMeasurement with AnnotationGtf {
   override def makeCufflinksJob(id: String, bamFile: File) = {
     val cufflinks = super.makeCufflinksJob(id, bamFile)
-    cufflinks.GTF_guide = Some(annotationGtf)
+    cufflinks.gtfGuide = Some(annotationGtf)
     cufflinks
   }
 }
diff --git a/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/measures/CufflinksMeasurement.scala b/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/measures/CufflinksMeasurement.scala
index 26616991e3b74985e38431e4cc19e99bc785d389..b3410cd9d40f0afc354c497a75da84364f80cec4 100644
--- a/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/measures/CufflinksMeasurement.scala
+++ b/public/gentrap/src/main/scala/nl/lumc/sasc/biopet/pipelines/gentrap/measures/CufflinksMeasurement.scala
@@ -11,7 +11,7 @@ trait CufflinksMeasurement extends QScript with Measurement {
   def makeCufflinksJob(id: String, bamFile: File) = {
     val cufflinks = new Cufflinks(this)
     cufflinks.input = bamFile
-    cufflinks.output_dir = new File(outputDir, id)
+    cufflinks.outputDir = new File(outputDir, id)
     cufflinks
   }
 
@@ -25,14 +25,14 @@ trait CufflinksMeasurement extends QScript with Measurement {
 
     val genesFpkmFiles = jobs.toList.map {
       case (id, job) =>
-        val file = new File(job.output_dir, s"$id.genes_fpkm.counts")
+        val file = new File(job.outputDir, s"$id.genes_fpkm.counts")
         add(Ln(this, job.outputGenesFpkm, file))
         file
     }
 
     val isoFormFpkmFiles = jobs.toList.map {
       case (id, job) =>
-        val file = new File(job.output_dir, s"$id.iso_form_fpkn.counts")
+        val file = new File(job.outputDir, s"$id.iso_form_fpkn.counts")
         add(Ln(this, job.outputIsoformsFpkm, file))
         file
     }
diff --git a/public/mapping/src/main/scala/nl/lumc/sasc/biopet/pipelines/mapping/Mapping.scala b/public/mapping/src/main/scala/nl/lumc/sasc/biopet/pipelines/mapping/Mapping.scala
index d37f8031b0f22bc06384b1d2febe7f7a58c49196..ed509aee9903f1a926101368f3bc82b908bacf67 100644
--- a/public/mapping/src/main/scala/nl/lumc/sasc/biopet/pipelines/mapping/Mapping.scala
+++ b/public/mapping/src/main/scala/nl/lumc/sasc/biopet/pipelines/mapping/Mapping.scala
@@ -42,10 +42,10 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
   def this() = this(null)
 
   @Input(doc = "R1 fastq file", shortName = "R1", required = true)
-  var input_R1: File = _
+  var inputR1: File = _
 
   @Input(doc = "R2 fastq file", shortName = "R2", required = false)
-  var input_R2: Option[File] = None
+  var inputR2: Option[File] = None
 
   /** Output name */
   var outputName: String = _
@@ -107,9 +107,9 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
   )
 
   /** File to add to the summary */
-  def summaryFiles: Map[String, File] = Map("output_bam" -> finalBamFile, "input_R1" -> input_R1,
+  def summaryFiles: Map[String, File] = Map("output_bam" -> finalBamFile, "input_R1" -> inputR1,
     "reference" -> referenceFasta()) ++
-    (if (input_R2.isDefined) Map("input_R2" -> input_R2.get) else Map())
+    (if (inputR2.isDefined) Map("input_R2" -> inputR2.get) else Map())
 
   /** Settings to add to summary */
   def summarySettings = Map(
@@ -134,14 +134,14 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
   /** Will be executed before script */
   def init() {
     require(outputDir != null, "Missing output directory on mapping module")
-    require(input_R1 != null, "Missing output directory on mapping module")
+    require(inputR1 != null, "Missing input R1 fastq file on mapping module")
     require(sampleId.isDefined, "Missing sample ID on mapping module")
     require(libId.isDefined, "Missing library ID on mapping module")
 
-    inputFiles :+= new InputFile(input_R1)
-    input_R2.foreach(inputFiles :+= new InputFile(_))
+    inputFiles :+= new InputFile(inputR1)
+    inputR2.foreach(inputFiles :+= new InputFile(_))
 
-    paired = input_R2.isDefined
+    paired = inputR2.isDefined
 
     if (readgroupId == null)
       readgroupId = config("readgroup_id", default = sampleId.get + "-" + libId.get)
@@ -153,8 +153,8 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
         if (config.contains("numberchunks")) numberChunks = config("numberchunks", default = None)
         else {
           val chunkSize: Int = config("chunksize", 1 << 30)
-          val filesize = if (input_R1.getName.endsWith(".gz") || input_R1.getName.endsWith(".gzip")) input_R1.length * 3
-          else input_R1.length
+          val filesize = if (inputR1.getName.endsWith(".gz") || inputR1.getName.endsWith(".gzip")) inputR1.length * 3
+          else inputR1.length
           numberChunks = Option(ceil(filesize.toDouble / chunkSize).toInt)
         }
       }
@@ -166,40 +166,40 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
   def biopetScript() {
     if (!skipFlexiprep) {
       flexiprep.outputDir = new File(outputDir, "flexiprep")
-      flexiprep.input_R1 = input_R1
-      flexiprep.input_R2 = input_R2
+      flexiprep.inputR1 = inputR1
+      flexiprep.input_R2 = inputR2
       flexiprep.sampleId = this.sampleId
       flexiprep.libId = this.libId
       flexiprep.init()
       flexiprep.runInitialJobs()
     }
     var bamFiles: List[File] = Nil
-    var fastq_R1_output: List[File] = Nil
-    var fastq_R2_output: List[File] = Nil
+    var fastqR1Output: List[File] = Nil
+    var fastqR2Output: List[File] = Nil
 
     val chunks: Map[File, (File, Option[File])] = {
       if (chunking) (for (t <- 1 to numberChunks.getOrElse(1)) yield {
         val chunkDir = new File(outputDir, "chunks" + File.separator + t)
-        chunkDir -> (new File(chunkDir, input_R1.getName),
-          if (paired) Some(new File(chunkDir, input_R2.get.getName)) else None)
+        chunkDir -> (new File(chunkDir, inputR1.getName),
+          if (paired) Some(new File(chunkDir, inputR2.get.getName)) else None)
       }).toMap
-      else if (skipFlexiprep) Map(outputDir -> (input_R1, if (paired) input_R2 else None))
-      else Map(outputDir -> (flexiprep.input_R1, flexiprep.input_R2))
+      else if (skipFlexiprep) Map(outputDir -> (inputR1, if (paired) inputR2 else None))
+      else Map(outputDir -> (flexiprep.inputR1, flexiprep.input_R2))
     }
 
     if (chunking) {
-      val fastSplitter_R1 = new FastqSplitter(this)
-      fastSplitter_R1.input = input_R1
-      for ((chunkDir, fastqfile) <- chunks) fastSplitter_R1.output :+= fastqfile._1
-      fastSplitter_R1.isIntermediate = true
-      add(fastSplitter_R1)
+      val fastSplitterR1 = new FastqSplitter(this)
+      fastSplitterR1.input = inputR1
+      for ((chunkDir, fastqfile) <- chunks) fastSplitterR1.output :+= fastqfile._1
+      fastSplitterR1.isIntermediate = true
+      add(fastSplitterR1)
 
       if (paired) {
-        val fastSplitter_R2 = new FastqSplitter(this)
-        fastSplitter_R2.input = input_R2.get
-        for ((chunkDir, fastqfile) <- chunks) fastSplitter_R2.output :+= fastqfile._2.get
-        fastSplitter_R2.isIntermediate = true
-        add(fastSplitter_R2)
+        val fastSplitterR2 = new FastqSplitter(this)
+        fastSplitterR2.input = inputR2.get
+        for ((chunkDir, fastqfile) <- chunks) fastSplitterR2.output :+= fastqfile._2.get
+        fastSplitterR2.isIntermediate = true
+        add(fastSplitterR2)
       }
     }
 
@@ -211,8 +211,8 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
         logger.debug(chunkDir + " - " + flexiout)
         R1 = flexiout._1
         if (paired) R2 = flexiout._2
-        fastq_R1_output :+= R1
-        R2.foreach(R2 => fastq_R2_output :+= R2)
+        fastqR1Output :+= R1
+        R2.foreach(R2 => fastqR2Output :+= R2)
       }
 
       val outputBam = new File(chunkDir, outputName + ".bam")
@@ -234,7 +234,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
         addAll(BamMetrics(this, outputBam, new File(chunkDir, "metrics"), sampleId, libId).functions)
     }
     if (!skipFlexiprep) {
-      flexiprep.runFinalize(fastq_R1_output, fastq_R2_output)
+      flexiprep.runFinalize(fastqR1Output, fastqR2Output)
       addAll(flexiprep.functions) // Add function of flexiprep to curent function pool
       addSummaryQScript(flexiprep)
     }
@@ -356,15 +356,15 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
     val tophat = new Tophat(this)
     tophat.R1 = tophat.R1 :+ R1
     if (paired) tophat.R2 = tophat.R2 :+ R2.get
-    tophat.output_dir = new File(outputDir, "tophat_out")
+    tophat.outputDir = new File(outputDir, "tophat_out")
     // always output BAM
-    tophat.no_convert_bam = false
+    tophat.noConvertBam = false
     // and always keep input ordering
-    tophat.keep_fasta_order = true
+    tophat.keepFastaOrder = true
     add(tophat)
 
     // fix unmapped file coordinates
-    val fixedUnmapped = new File(tophat.output_dir, "unmapped_fixup.sam")
+    val fixedUnmapped = new File(tophat.outputDir, "unmapped_fixup.sam")
     val fixer = new TophatRecondition(this)
     fixer.inputBam = tophat.outputAcceptedHits
     fixer.outputSam = fixedUnmapped.getAbsoluteFile
@@ -372,14 +372,14 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
     add(fixer)
 
     // sort fixed SAM file
-    val sorter = SortSam(this, fixer.outputSam, new File(tophat.output_dir, "unmapped_fixup.sorted.bam"))
+    val sorter = SortSam(this, fixer.outputSam, new File(tophat.outputDir, "unmapped_fixup.sorted.bam"))
     sorter.sortOrder = "coordinate"
     sorter.isIntermediate = true
     add(sorter)
 
     // merge with mapped file
     val mergeSamFile = MergeSamFiles(this, List(tophat.outputAcceptedHits, sorter.output),
-      new File(tophat.output_dir, "fixed_merged.bam"), sortOrder = "coordinate")
+      new File(tophat.outputDir, "fixed_merged.bam"), sortOrder = "coordinate")
     mergeSamFile.createIndex = true
     mergeSamFile.isIntermediate = true
     add(mergeSamFile)
@@ -443,7 +443,7 @@ class Mapping(val root: Configurable) extends QScript with SummaryQScript with S
   /** Add bowtie2 jobs **/
   def addBowtie2(R1: File, R2: Option[File], output: File): File = {
     val bowtie2 = new Bowtie2(this)
-    bowtie2.rg_id = Some(readgroupId)
+    bowtie2.rgId = Some(readgroupId)
     bowtie2.rg +:= ("LB:" + libId.get)
     bowtie2.rg +:= ("PL:" + platform)
     bowtie2.rg +:= ("PU:" + platformUnit)
diff --git a/public/mapping/src/main/scala/nl/lumc/sasc/biopet/pipelines/mapping/MultisampleMappingTrait.scala b/public/mapping/src/main/scala/nl/lumc/sasc/biopet/pipelines/mapping/MultisampleMappingTrait.scala
index a69160a6a7928add1503c8a332fb84978a3d03fd..5f69fc45e1dd7fbd4131f266b1df4b1d77edb713 100644
--- a/public/mapping/src/main/scala/nl/lumc/sasc/biopet/pipelines/mapping/MultisampleMappingTrait.scala
+++ b/public/mapping/src/main/scala/nl/lumc/sasc/biopet/pipelines/mapping/MultisampleMappingTrait.scala
@@ -110,8 +110,8 @@ trait MultisampleMappingTrait extends MultiSampleQScript
 
         if (inputR1.isDefined) {
           mapping.foreach { m =>
-            m.input_R1 = inputR1.get
-            m.input_R2 = inputR2
+            m.inputR1 = inputR1.get
+            m.inputR2 = inputR2
             add(m)
           }
         } else if (inputBam.isDefined) {
@@ -122,8 +122,8 @@ trait MultisampleMappingTrait extends MultiSampleQScript
             samToFastq.isIntermediate = libraries.size > 1
             qscript.add(samToFastq)
             mapping.foreach(m => {
-              m.input_R1 = samToFastq.fastqR1
-              m.input_R2 = Some(samToFastq.fastqR2)
+              m.inputR1 = samToFastq.fastqR1
+              m.inputR2 = Some(samToFastq.fastqR2)
               add(m)
             })
           } else {
diff --git a/public/mapping/src/test/scala/nl/lumc/sasc/biopet/pipelines/mapping/MappingTest.scala b/public/mapping/src/test/scala/nl/lumc/sasc/biopet/pipelines/mapping/MappingTest.scala
index 7af4c60d5a2dc07b31b60864c5557287fc949fca..cce528aaa5f10f796dd06c34f45860b689ccd12e 100644
--- a/public/mapping/src/test/scala/nl/lumc/sasc/biopet/pipelines/mapping/MappingTest.scala
+++ b/public/mapping/src/test/scala/nl/lumc/sasc/biopet/pipelines/mapping/MappingTest.scala
@@ -73,11 +73,11 @@ abstract class AbstractTestMapping(val aligner: String) extends TestNGSuite with
     val mapping: Mapping = initPipeline(map)
 
     if (zipped) {
-      mapping.input_R1 = r1Zipped
-      if (paired) mapping.input_R2 = Some(r2Zipped)
+      mapping.inputR1 = r1Zipped
+      if (paired) mapping.inputR2 = Some(r2Zipped)
     } else {
-      mapping.input_R1 = r1
-      if (paired) mapping.input_R2 = Some(r2)
+      mapping.inputR1 = r1
+      if (paired) mapping.inputR2 = Some(r2)
     }
     mapping.sampleId = Some("1")
     mapping.libId = Some("1")
diff --git a/public/sage/src/main/scala/nl/lumc/sasc/biopet/pipelines/sage/Sage.scala b/public/sage/src/main/scala/nl/lumc/sasc/biopet/pipelines/sage/Sage.scala
index 8d28a6ea13e1e09ce16afbec4faccc29631945c8..3ef45bc4ea2f141fb6c5cbb17b043121d2d5c60e 100644
--- a/public/sage/src/main/scala/nl/lumc/sasc/biopet/pipelines/sage/Sage.scala
+++ b/public/sage/src/main/scala/nl/lumc/sasc/biopet/pipelines/sage/Sage.scala
@@ -92,7 +92,7 @@ class Sage(val root: Configurable) extends QScript with MultiSampleQScript {
         inputFiles :+= new InputFile(inputFastq, config("R1_md5"))
 
         flexiprep.outputDir = new File(libDir, "flexiprep/")
-        flexiprep.input_R1 = inputFastq
+        flexiprep.inputR1 = inputFastq
         flexiprep.init()
         flexiprep.biopetScript()
         qscript.addAll(flexiprep.functions)
@@ -105,7 +105,7 @@ class Sage(val root: Configurable) extends QScript with MultiSampleQScript {
         pf.deps +:= flexiprep.outputFiles("fastq_input_R1")
         qscript.add(pf)
 
-        mapping.input_R1 = pf.outputFastq
+        mapping.inputR1 = pf.outputFastq
         mapping.outputDir = libDir
         mapping.init()
         mapping.biopetScript()
diff --git a/public/shiva/src/main/scala/nl/lumc/sasc/biopet/pipelines/shiva/svcallers/Pindel.scala b/public/shiva/src/main/scala/nl/lumc/sasc/biopet/pipelines/shiva/svcallers/Pindel.scala
index 506aed3835bedccd8028ac2e531c2c74874874a8..25281ec11f2f26b608378008c2976a33afff4849 100644
--- a/public/shiva/src/main/scala/nl/lumc/sasc/biopet/pipelines/shiva/svcallers/Pindel.scala
+++ b/public/shiva/src/main/scala/nl/lumc/sasc/biopet/pipelines/shiva/svcallers/Pindel.scala
@@ -36,11 +36,11 @@ class Pindel(val root: Configurable) extends SvCaller {
     for ((sample, bamFile) <- inputBams) {
       val pindelDir = new File(outputDir, sample)
 
-      val config_file: File = new File(pindelDir, sample + ".pindel.cfg")
+      val configFile: File = new File(pindelDir, sample + ".pindel.cfg")
       val cfg = new PindelConfig(this)
       cfg.input = bamFile
       cfg.sampleName = sample
-      cfg.output = config_file
+      cfg.output = configFile
       add(cfg)
 
       val pindel = PindelCaller(this, cfg.output, pindelDir)