Commit 8b471bb1 authored by Peter van 't Hof's avatar Peter van 't Hof
Browse files

Merge branch 'feature-pindelwrapper' into 'develop'

Adding the pindel wrapper to the extensions. Updated to support v0.2.5b8

Adding the pindel wrapper

Updates:

- [x] Pindel wrapper updated to support version v0.2.5b8
- [x] Added a tool to create the pindel config (a.k.a. the BAM list for pindel; see the example format below)
- [x] VCF output for pindel (using pindel2vcf)
- [x] Insert size estimation tool (not using BamMetrics; that estimation is done in the mapping pipeline but is not accessible from ShivaSvCalling -> Pindel)

Closes #240
Closes #263

See merge request !304
parents e1fecc16 d0e1be17
Showing 702 additions and 327 deletions
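For reference, the bam configuration file produced by the new PindelConfig tool is a tab-separated list with one line per bam file: bam path, average insert size, and a sample label for reporting. The example below is taken from the format comment in PindelConfig further down in this diff (paths and labels are illustrative):

filename<tab>avg insert size<tab>sample_label
tumor_sample_1222.bam<tab>250<tab>TUMOR_1222
somatic_sample_1222.bam<tab>250<tab>HEALTHY_1222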
/**
* Biopet is built on top of GATK Queue for building bioinformatic
* pipelines. It is mainly intended to support LUMC SHARK cluster which is running
* SGE. But other types of HPC that are supported by GATK Queue (such as PBS)
* should also be able to execute Biopet tools and pipelines.
*
* Copyright 2014 Sequencing Analysis Support Core - Leiden University Medical Center
*
* Contact us at: sasc@lumc.nl
*
* A dual licensing mode is applied. The source code within this project that are
* not part of GATK Queue is freely available for non-commercial use under an AGPL
* license; For commercial users or users who do not want to follow the AGPL
* license, please contact us to obtain a separate license.
*/
package nl.lumc.sasc.biopet.extensions.pindel
import java.io.File
import nl.lumc.sasc.biopet.core.{ BiopetCommandLineFunction, Reference, Version }
import nl.lumc.sasc.biopet.utils.Logging
import nl.lumc.sasc.biopet.utils.config.Configurable
import org.broadinstitute.gatk.utils.commandline._
/**
* Extension for pindel
*
* Based on version 0.2.5b8
*/
class PindelCaller(val root: Configurable) extends BiopetCommandLineFunction with Reference with Version {
executable = config("exe", default = "pindel")
override def defaultCoreMemory = 4.0
override def defaultThreads = 4
def versionRegex = """Pindel version:? (.*)""".r
override def versionExitcode = List(1)
def versionCommand = executable
/**
* Required parameters
*/
@Input
var reference: File = referenceFasta
@Input(doc = "Input specification for Pindel to use")
var input: File = _
@Argument(doc = "The pindel configuration file", required = false)
var pindelFile: Option[File] = None
@Argument(doc = "Configuration file with: bam-location/insert size/name", required = false)
var configFile: Option[File] = None
@Argument(doc = "Work directory")
var outputPrefix: File = _
@Output(doc = "Output file of Pindel, pointing to the DEL file")
var outputFile: File = _
@Output(doc = "", required = false)
var outputINV: File = _
@Output(doc = "", required = false)
var outputTD: File = _
@Output(doc = "", required = false)
var outputLI: File = _
@Output(doc = "", required = false)
var outputBP: File = _
@Output(doc = "", required = false)
var outputSI: File = _
@Output(doc = "", required = false)
var outputRP: File = _
@Output(doc = "", required = false)
var outputCloseEndMapped: File = _
var RP: Option[Int] = config("RP")
var minDistanceToTheEnd: Option[Int] = config("min_distance_to_the_end")
// var threads
var maxRangeIndex: Option[Int] = config("max_range_index")
var windowSize: Option[Int] = config("window_size")
var sequencingErrorRate: Option[Float] = config("sequencing_error_rate")
var sensitivity: Option[Float] = config("sensitivity")
var maximumAllowedMismatchRate: Option[Float] = config("maximum_allowed_mismatch_rate")
var nm: Option[Int] = config("nm")
var reportInversions: Boolean = config("report_inversions", default = false)
var reportDuplications: Boolean = config("report_duplications", default = false)
var reportLongInsertions: Boolean = config("report_long_insertions", default = false)
var reportBreakpoints: Boolean = config("report_breakpoints", default = false)
var reportCloseMappedReads: Boolean = config("report_close_mapped_reads", default = false)
var reportOnlyCloseMappedReads: Boolean = config("report_only_close_mapped_reads", default = false)
var reportInterchromosomalEvents: Boolean = config("report_interchromosomal_events", default = false)
var IndelCorrection: Boolean = config("IndelCorrection", default = false)
var NormalSamples: Boolean = config("NormalSamples", default = false)
var breakdancer: Option[File] = config("breakdancer")
var include: Option[File] = config("include")
var exclude: Option[File] = config("exclude")
var additionalMismatch: Option[Int] = config("additional_mismatch")
var minPerfectMatchAroundBP: Option[Int] = config("min_perfect_match_around_BP")
var minInversionSize: Option[Int] = config("min_inversion_size")
var minNumMatchedBases: Option[Int] = config("min_num_matched_bases")
var balanceCutoff: Option[Int] = config("balance_cutoff")
var anchorQuality: Option[Int] = config("anchor_quality")
var minimumSupportForEvent: Option[Int] = config("minimum_support_for_event")
var inputSVCallsForAssembly: Option[File] = config("input_SV_Calls_for_assembly")
var genotyping: Boolean = config("genotyping", default = false)
var outputOfBreakdancerEvents: Option[File] = config("output_of_breakdancer_events")
var nameOfLogfile: Option[File] = config("name_of_logfile")
var ploidy: Option[File] = config("ploidy")
var detectDD: Boolean = config("detect_DD", default = false)
var MAX_DD_BREAKPOINT_DISTANCE: Option[Int] = config("MAX_DD_BREAKPOINT_DISTANCE")
var MAX_DISTANCE_CLUSTER_READS: Option[Int] = config("MAX_DISTANCE_CLUSTER_READS")
var MIN_DD_CLUSTER_SIZE: Option[Int] = config("MIN_DD_CLUSTER_SIZE")
var MIN_DD_BREAKPOINT_SUPPORT: Option[Int] = config("MIN_DD_BREAKPOINT_SUPPORT")
var MIN_DD_MAP_DISTANCE: Option[Int] = config("MIN_DD_MAP_DISTANCE")
var DD_REPORT_DUPLICATION_READS: Option[Int] = config("DD_REPORT_DUPLICATION_READS")
override def beforeGraph: Unit = {
if (reference == null) reference = referenceFasta()
// Check whether `pindel-config-file` or `config-file` (the bam list) is set;
// exactly one of the two must be given.
(pindelFile, configFile) match {
case (None, None) => Logging.addError("No pindel config is given")
case (Some(a), Some(b)) => Logging.addError(s"Please specify either a pindel config or bam-config. Not both for Pindel: $a or $b")
case (Some(a), None) => {
Logging.logger.info(s"Using '${a}' as pindel config for Pindel")
input = a.getAbsoluteFile
}
case (None, Some(b)) => {
Logging.logger.info(s"Using '${b}' as bam config for Pindel")
input = b.getAbsoluteFile
}
}
/** Set up the many output files pindel produces */
outputINV = new File(outputPrefix + File.separator, "sample_INV")
outputTD = new File(outputPrefix + File.separator, "sample_TD")
if (reportLongInsertions) {
outputLI = new File(outputPrefix + File.separator, "sample_LI")
}
if (reportBreakpoints) {
outputBP = new File(outputPrefix + File.separator, "sample_BP")
}
outputSI = new File(outputPrefix + File.separator, "sample_SI")
outputRP = new File(outputPrefix + File.separator, "sample_RP")
if (reportCloseMappedReads) {
outputCloseEndMapped = new File(outputPrefix + File.separator, "sample_CloseEndMapped")
}
// set the output file; the deletion (D) call is always made
outputFile = new File(outputPrefix + File.separator, "sample_D")
}
def cmdLine = required(executable) +
required("--fasta ", reference) +
optional("--pindel-config-file", pindelFile) +
optional("--config-file", configFile) +
required("--output-prefix ", new File(outputPrefix + File.separator, "sample")) +
optional("--RP", RP) +
optional("--min_distance_to_the_end", minDistanceToTheEnd) +
optional("--number_of_threads", threads) +
optional("--max_range_index", maxRangeIndex) +
optional("--windows_size", windowSize) +
optional("--sequencing_error_rate", sequencingErrorRate) +
optional("--sensitivity", sensitivity) +
optional("--maximum_allowed_mismatch_rate", maximumAllowedMismatchRate) +
optional("--NM", nm) +
conditional(reportInversions, "--report_inversions") +
conditional(reportDuplications, "--report_duplications") +
conditional(reportLongInsertions, "--report_long_insertions") +
conditional(reportBreakpoints, "--report_breakpoints") +
conditional(reportCloseMappedReads, "--report_close_mapped_reads") +
conditional(reportOnlyCloseMappedReads, "--report_only_close_mapped_reads") +
conditional(reportInterchromosomalEvents, "--report_interchromosomal_events") +
conditional(IndelCorrection, "--IndelCorrection") +
conditional(NormalSamples, "--NormalSamples") +
optional("--breakdancer", breakdancer) +
optional("--include", include) +
optional("--exclude", exclude) +
optional("--additional_mismatch", additionalMismatch) +
optional("--min_perfect_match_around_BP", minPerfectMatchAroundBP) +
optional("--min_inversion_size", minInversionSize) +
optional("--min_num_matched_bases", minNumMatchedBases) +
optional("--balance_cutoff", balanceCutoff) +
optional("--anchor_quality", anchorQuality) +
optional("--minimum_support_for_event", minimumSupportForEvent) +
optional("--input_SV_Calls_for_assembly", inputSVCallsForAssembly) +
conditional(genotyping, "-g") +
optional("--output_of_breakdancer_events", outputOfBreakdancerEvents) +
optional("--name_of_logfile", nameOfLogfile) +
optional("--Ploidy", ploidy) +
conditional(detectDD, "detect_DD") +
optional("--MAX_DD_BREAKPOINT_DISTANCE", MAX_DD_BREAKPOINT_DISTANCE) +
optional("--MAX_DISTANCE_CLUSTER_READS", MAX_DISTANCE_CLUSTER_READS) +
optional("--MIN_DD_CLUSTER_SIZE", MIN_DD_CLUSTER_SIZE) +
optional("--MIN_DD_BREAKPOINT_SUPPORT", MIN_DD_BREAKPOINT_SUPPORT) +
optional("--MIN_DD_MAP_DISTANCE", MIN_DD_MAP_DISTANCE) +
optional("--DD_REPORT_DUPLICATION_READS", DD_REPORT_DUPLICATION_READS)
}
object PindelCaller {
def apply(root: Configurable, configFile: File, outputDir: File): PindelCaller = {
val caller = new PindelCaller(root)
caller.configFile = Some(configFile)
caller.outputPrefix = outputDir
caller.beforeGraph
caller
}
}
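A minimal wiring sketch for this extension inside a Queue script, using the companion apply above; `pindelDir` and the config path are hypothetical names, not part of this commit:

// Sketch only: assumes a QScript context (`this`) and a bam config created by PindelConfig.
val pindelBamConfig = new File(pindelDir, "sample1.pindel.cfg") // hypothetical path
val caller = PindelCaller(this, pindelBamConfig, pindelDir)
add(caller) // apply already ran beforeGraph, so caller.outputFile points to <pindelDir>/sample_D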
@@ -14,11 +14,12 @@
* license, please contact us to obtain a separate license.
*/
package nl.lumc.sasc.biopet.extensions.pindel
import java.io.{ File, PrintWriter }
import java.io.File
import nl.lumc.sasc.biopet.core.{ BiopetJavaCommandLineFunction, ToolCommand }
import htsjdk.samtools.SamReaderFactory
import nl.lumc.sasc.biopet.core.BiopetJavaCommandLineFunction
import nl.lumc.sasc.biopet.utils.config.Configurable
import nl.lumc.sasc.biopet.utils.{ BamUtils, ToolCommand }
import org.broadinstitute.gatk.utils.commandline.{ Argument, Input, Output }
class PindelConfig(val root: Configurable) extends BiopetJavaCommandLineFunction {
@@ -30,46 +31,34 @@ class PindelConfig(val root: Configurable) extends BiopetJavaCommandLineFunction
var output: File = _
@Argument(doc = "Insertsize")
var insertsize: Option[Int] = _
var insertSize: Int = _
var sampleName: String = _
override def cmdLine = super.cmdLine +
"-i" + required(input) +
"-s" + required(insertsize) +
"-o" + required(output)
required("-i", input) +
required("-n", sampleName) +
required("-s", insertSize) +
required("-o", output)
}
object PindelConfig extends ToolCommand {
def apply(root: Configurable, input: File, output: File): PindelConfig = {
val conf = new PindelConfig(root)
conf.input = input
conf.output = output
conf
}
def apply(root: Configurable, input: File, outputDir: String): PindelConfig = {
val dir = if (outputDir.endsWith("/")) outputDir else outputDir + "/"
val outputFile = new File(dir + swapExtension(input.getName))
apply(root, input, outputFile)
}
def apply(root: Configurable, input: File): PindelConfig = {
apply(root, input, new File(swapExtension(input.getAbsolutePath)))
}
private def swapExtension(inputFile: String) = inputFile.substring(0, inputFile.lastIndexOf(".bam")) + ".pindel.cfg"
case class Args(inputbam: File = null, samplelabel: Option[String] = None, insertsize: Option[Int] = None) extends AbstractArgs
case class Args(inputBam: File = null, sampleLabel: Option[String] = None,
insertSize: Option[Int] = None, output: Option[File] = None) extends AbstractArgs
class OptParser extends AbstractOptParser {
opt[File]('i', "inputbam") required () valueName "<bamfile/path>" action { (x, c) =>
c.copy(inputbam = x)
c.copy(inputBam = x)
} text "Please specify the input bam file"
opt[String]('l', "samplelabel") valueName "<sample label>" action { (x, c) =>
c.copy(samplelabel = Some(x))
opt[String]('n', "samplelabel") valueName "<sample label>" action { (x, c) =>
c.copy(sampleLabel = Some(x))
} text "Sample label is missing"
opt[Int]('s', "insertsize") valueName "<insertsize>" action { (x, c) =>
c.copy(insertsize = Some(x))
c.copy(insertSize = Some(x))
} text "Insertsize is missing"
opt[File]('o', "output") valueName "<output>" action { (x, c) =>
c.copy(output = Some(x))
} text "Output path is missing"
}
/**
@@ -79,7 +68,29 @@ object PindelConfig extends ToolCommand {
val argsParser = new OptParser
val commandArgs: Args = argsParser.parse(args, Args()) getOrElse sys.exit(1)
val input: File = commandArgs.inputbam
val input: File = commandArgs.inputBam
val output: File = commandArgs.output.getOrElse(new File(input.getAbsoluteFile + ".pindel.cfg"))
val insertSize: Int = commandArgs.insertSize.getOrElse(BamUtils.sampleBamInsertSize(input))
val bamReader = SamReaderFactory.makeDefault().open(input)
val writer = new PrintWriter(output)
for (sample <- BamUtils.sampleBamMap(List(input))) {
writer.write("%s\t%d\t%s\n".format(sample._2.getAbsoluteFile, insertSize, sample._1))
}
bamReader.close()
writer.close()
// The logic here is to pull the samples stored in the bam file and write them to a pindel config file
// (the "bam configuration file"; see http://gmt.genome.wustl.edu/packages/pindel/quick-start.html).
// The sample label can be given on the command line or read from the bam header.
/**
* filename<tab>avg insert size<tab>sample_label or name for reporting
* tumor_sample_1222.bam<tab>250<tab>TUMOR_1222
* somatic_sample_1222.bam<tab>250<tab>HEALTHY_1222
*/
}
}
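The config tool can also be invoked standalone; a hedged sketch of calling its main with the options defined in the OptParser above (the bam path is hypothetical; when -s is omitted the insert size is estimated from the bam via BamUtils.sampleBamInsertSize):

PindelConfig.main(Array(
  "-i", "/data/sample01.bam", // input bam, hypothetical path
  "-o", "/data/sample01.pindel.cfg" // output config file
))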
package nl.lumc.sasc.biopet.extensions.pindel
import java.io.File
import nl.lumc.sasc.biopet.core.{ Version, Reference, BiopetCommandLineFunction }
import nl.lumc.sasc.biopet.utils.config.Configurable
import org.broadinstitute.gatk.utils.commandline.{ Output, Input }
/**
* Created by wyleung on 20-1-16.
*/
class PindelVCF(val root: Configurable) extends BiopetCommandLineFunction with Reference with Version {
executable = config("exe", default = "pindel2vcf")
override def defaultCoreMemory = 2.0
override def defaultThreads = 1
def versionRegex = """Version:?[ ]+(.*)""".r
override def versionExitcode = List(0)
def versionCommand = executable + " -h"
/**
* Required parameters
*/
@Input
var reference: File = referenceFasta
@Output
var outputVCF: File = _
var rDate: String = config("rdate", freeVar = false)
override def beforeGraph: Unit = {
if (reference == null) reference = referenceFasta()
}
@Input(doc = "Make this file a dependency before pindel2vcf can run. Usually a file generated by Pindel such as a _D file")
var pindelOutputInputHolder: File = _
var pindelOutput: Option[File] = config("pindel_output")
var pindelOutputRoot: Option[File] = config("pindel_output_root")
var chromosome: Option[String] = config("chromosome")
var windowSize: Option[Int] = config("window_size")
var minCoverage: Option[Int] = config("min_coverage")
var hetCutoff: Option[Float] = config("het_cutoff")
var homCutoff: Option[Float] = config("hom_cutoff")
var minSize: Option[Int] = config("min_size")
var maxSize: Option[Int] = config("max_size")
var bothStrandSupported: Boolean = config("both_strand_supported", default = false)
var minSupportingSamples: Option[Int] = config("min_supporting_samples")
var minSupportingReads: Option[Int] = config("min_supporting_reads")
var maxSupportingReads: Option[Int] = config("max_supporting_reads")
var regionStart: Option[Int] = config("region_start")
var regionEnd: Option[Int] = config("region_end")
var maxInternalRepeats: Option[Int] = config("max_internal_repeats")
var compactOutLimit: Option[Int] = config("compact_output_limit")
var maxInternalRepeatLength: Option[Int] = config("max_internal_repeatlength")
var maxPostindelRepeats: Option[Int] = config("max_postindel_repeat")
var maxPostindelRepeatLength: Option[Int] = config("max_postindel_repeatlength")
var onlyBalancedSamples: Boolean = config("only_balanced_samples", default = false)
var somaticP: Boolean = config("somatic_p", default = false)
var minimumStrandSupport: Option[Int] = config("minimum_strand_support")
var gatkCompatible: Boolean = config("gatk_compatible", default = false)
def cmdLine = required(executable) +
required("--reference", reference) +
required("--reference_name", referenceSpecies) +
required("--reference_date", rDate) +
required("--fake_biopet_input_holder", pindelOutputInputHolder) +
optional("--pindel_output", pindelOutput) +
optional("--pindel_output_root", pindelOutputRoot) +
required("--vcf", outputVCF) +
optional("--chromosome", chromosome) +
optional("--window_size", windowSize) +
optional("--min_coverage", minCoverage) +
optional("--het_cutoff", hetCutoff) +
optional("--hom_cutoff", homCutoff) +
optional("--min_size", minSize) +
optional("--max_size", maxSize) +
conditional(bothStrandSupported, "--both_strands_supported") +
optional("--min_supporting_samples", minSupportingSamples) +
optional("--min_supporting_reads", minSupportingReads) +
optional("--max_supporting_reads", maxSupportingReads) +
optional("--region_start", regionStart) +
optional("--region_end", regionEnd) +
optional("--max_internal_repeats", maxInternalRepeats) +
optional("--compact_output_limit", compactOutLimit) +
optional("--max_internal_repeatlength", maxInternalRepeatLength) +
optional("--max_postindel_repeats", maxPostindelRepeats) +
optional("--max_postindel_repeatlength", maxPostindelRepeatLength) +
conditional(onlyBalancedSamples, "--only_balanced_samples") +
conditional(somaticP, "--somatic_p") +
optional("--minimum_strand_support", minimumStrandSupport) +
conditional(gatkCompatible, "--gatk_compatible")
}
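A hedged wiring sketch for PindelVCF, mirroring how the Pindel SvCaller (further down in this diff) uses it; `dir` and `pindelDeletions` are hypothetical names:

// Sketch only: pindel2vcf needs one of the raw pindel output files as an input dependency.
val pindelVcf = new PindelVCF(this)
pindelVcf.pindelOutputInputHolder = pindelDeletions // e.g. the sample_D file from PindelCaller
pindelVcf.pindelOutputRoot = Some(new File(dir, "sample")) // prefix shared by all sample_* files
pindelVcf.rDate = "20160120" // officially the release date of the reference genome
pindelVcf.outputVCF = new File(dir, "sample.pindel.vcf")
add(pindelVcf)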
@@ -2,9 +2,10 @@ package nl.lumc.sasc.biopet.utils
import java.io.File
import htsjdk.samtools.SamReaderFactory
import htsjdk.samtools.{ SamReader, SamReaderFactory }
import scala.collection.JavaConversions._
import scala.collection.parallel.immutable
/**
* Created by pjvan_thof on 11/19/15.
@@ -31,4 +32,64 @@ object BamUtils {
if (temp.map(_._1).distinct.size != temp.size) throw new IllegalArgumentException("Sample has been found twice")
temp.toMap
}
/**
* Estimate the insert size of fragments within the given contig.
* Uses the properly paired reads according to the flags set by the aligner.
*
* @param inputBam input bam file
* @param contig contig to scan
* @param start position to start scanning
* @param end position to stop scanning
* @param samplingSize maximum number of read pairs to sample
* @return Some(average insert size) for this contig, or None when no proper pairs are found
*/
def contigInsertSize(inputBam: File, contig: String, start: Int, end: Int, samplingSize: Int = 100000): Option[Int] = {
val inputSam: SamReader = SamReaderFactory.makeDefault.open(inputBam)
val samIterator = inputSam.query(contig, start, end, true)
val insertsizes: List[Int] = (for {
read <- samIterator.toStream.takeWhile(rec => {
val paired = rec.getReadPairedFlag && rec.getProperPairFlag
val bothMapped = if (paired) ((rec.getReadUnmappedFlag == false) && (rec.getMateUnmappedFlag == false)) else false
paired && bothMapped
}).take(samplingSize)
} yield {
read.getInferredInsertSize.asInstanceOf[Int].abs
})(collection.breakOut)
val cti = insertsizes.foldLeft((0.0, 0))((t, r) => (t._1 + r, t._2 + 1))
samIterator.close()
inputSam.close()
val ret = if (cti._2 == 0) None else Some((cti._1 / cti._2).toInt)
ret
}
/**
* Estimate the insert size for a single bam file, averaged over all contigs
*
* @param bamFile bam file to estimate the average insert size from
* @param samplingSize maximum number of read pairs to sample per contig
* @return average insert size, or 0 when no properly paired reads are found
*/
def sampleBamInsertSize(bamFile: File, samplingSize: Int = 100000): Int = {
val inputSam: SamReader = SamReaderFactory.makeDefault.open(bamFile)
val baminsertsizes = inputSam.getFileHeader.getSequenceDictionary.getSequences.par.map({
contig => BamUtils.contigInsertSize(bamFile, contig.getSequenceName, 1, contig.getSequenceLength, samplingSize)
}).toList
val counts = baminsertsizes.flatMap(x => x)
if (counts.size != 0) {
counts.sum / counts.size
} else {
0
}
}
/**
* Estimate the insert size for each bam file and return a Map[<sampleBamFile>, <insertSize>]
*
* @param bamFiles input bam files
* @param samplingSize maximum number of read pairs to sample per contig
* @return parallel map from bam file to its estimated insert size
*/
def sampleBamsInsertSize(bamFiles: List[File], samplingSize: Int = 100000): immutable.ParMap[File, Int] = bamFiles.par.map { bamFile =>
bamFile -> sampleBamInsertSize(bamFile, samplingSize)
}.toMap
}
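A short usage sketch for the new insert-size helpers (the bam path is hypothetical; the bam must be coordinate-sorted and indexed, since contigInsertSize uses an indexed region query):

val bam = new File("/data/sample01.bam") // hypothetical
val onContig = BamUtils.contigInsertSize(bam, "chr1", 1, 1000000) // Option[Int], None without proper pairs
val perBam = BamUtils.sampleBamInsertSize(bam) // Int, 0 when no properly paired reads exist
val perBams = BamUtils.sampleBamsInsertSize(List(bam)) // ParMap[File, Int], one entry per bam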
package nl.lumc.sasc.biopet.utils
import java.io.File
import htsjdk.samtools._
import org.mockito.Mockito.{ inOrder => inOrd }
import org.scalatest.Matchers
import org.scalatest.mock.MockitoSugar
import org.scalatest.testng.TestNGSuite
import org.testng.annotations.{ BeforeClass, Test }
/**
* Created by wyleung on 22-2-16.
* SAM file creation and test records are borrowed from WipeReadsTest by @bow
*/
class BamUtilsTest extends TestNGSuite with MockitoSugar with Matchers {
import BamUtils._
private val samHeaderTemplate: SAMLineParser = {
val samh = new SAMFileHeader
samh.setSortOrder(SAMFileHeader.SortOrder.coordinate)
samh.addSequence(new SAMSequenceRecord("chrQ", 10000))
samh.addSequence(new SAMSequenceRecord("chrR", 10000))
val readGroup1 = new SAMReadGroupRecord("001")
readGroup1.setSample("sample01")
samh.addReadGroup(readGroup1)
val readGroup2 = new SAMReadGroupRecord("002")
readGroup2.setSample("sample01")
samh.addReadGroup(readGroup2)
new SAMLineParser(samh)
}
private def makeSams(raws: String*): Seq[SAMRecord] =
raws.map(s => samHeaderTemplate.parseLine(s))
private def makeSamReader(f: File): SamReader = SamReaderFactory
.make()
.validationStringency(ValidationStringency.LENIENT)
.open(f)
val sBamRecs1 = makeSams(
"r02\t0\tchrQ\t50\t60\t10M\t*\t0\t0\tTACGTACGTA\tEEFFGGHHII\tRG:Z:001",
"r01\t16\tchrQ\t190\t60\t10M\t*\t0\t0\tTACGTACGTA\tEEFFGGHHII\tRG:Z:001",
"r01\t16\tchrQ\t290\t60\t10M\t*\t0\t0\tGGGGGAAAAA\tGGGGGGGGGG\tRG:Z:001",
"r04\t0\tchrQ\t450\t60\t10M\t*\t0\t0\tCGTACGTACG\tEEFFGGHHII\tRG:Z:001",
"r03\t16\tchrQ\t690\t60\t10M\t*\t0\t0\tCCCCCTTTTT\tHHHHHHHHHH\tRG:Z:001",
"r05\t0\tchrQ\t890\t60\t5M200N5M\t*\t0\t0\tGATACGATAC\tFEFEFEFEFE\tRG:Z:001",
"r06\t4\t*\t0\t0\t*\t*\t0\t0\tATATATATAT\tHIHIHIHIHI\tRG:Z:001"
)
val pBamRecs1 = makeSams(
"r02\t99\tchrQ\t50\t60\t10M\t=\t90\t50\tTACGTACGTA\tEEFFGGHHII\tRG:Z:001",
"r02\t147\tchrQ\t90\t60\t10M\t=\t50\t-50\tATGCATGCAT\tEEFFGGHHII\tRG:Z:001",
"r01\t163\tchrQ\t150\t60\t10M\t=\t190\t50\tAAAAAGGGGG\tGGGGGGGGGG\tRG:Z:001",
"r01\t83\tchrQ\t190\t60\t10M\t=\t150\t-50\tGGGGGAAAAA\tGGGGGGGGGG\tRG:Z:001",
"r01\t163\tchrQ\t250\t60\t10M\t=\t290\t50\tAAAAAGGGGG\tGGGGGGGGGG\tRG:Z:001",
"r01\t83\tchrQ\t290\t60\t10M\t=\t250\t-50\tGGGGGAAAAA\tGGGGGGGGGG\tRG:Z:001",
"r04\t99\tchrQ\t450\t60\t10M\t=\t490\t50\tCGTACGTACG\tEEFFGGHHII\tRG:Z:001",
"r04\t147\tchrQ\t490\t60\t10M\t=\t450\t-50\tGCATGCATGC\tEEFFGGHHII\tRG:Z:001",
"r03\t163\tchrQ\t650\t60\t10M\t=\t690\t50\tTTTTTCCCCC\tHHHHHHHHHH\tRG:Z:001",
"r03\t83\tchrQ\t690\t60\t10M\t=\t650\t-50\tCCCCCTTTTT\tHHHHHHHHHH\tRG:Z:001",
"r05\t99\tchrQ\t890\t60\t5M200N5M\t=\t1140\t50\tTACGTACGTA\tEEFFGGHHII\tRG:Z:001",
"r05\t147\tchrQ\t1140\t60\t10M\t=\t890\t-50\tATGCATGCAT\tEEFFGGHHII\tRG:Z:001",
"r06\t4\t*\t0\t0\t*\t*\t0\t0\tATATATATAT\tHIHIHIHIHI\tRG:Z:001",
"r06\t4\t*\t0\t0\t*\t*\t0\t0\tGCGCGCGCGC\tHIHIHIHIHI\tRG:Z:001"
)
@BeforeClass
def start: Unit = {
createTestFileFrom(sBamRecs1, BamUtilsTest.singleEndBam01)
createTestFileFrom(pBamRecs1, BamUtilsTest.pairedEndBam01)
}
private def createTestFileFrom(records: Seq[SAMRecord], output: File): File = {
output.getParentFile.createNewFile()
val outBam = new SAMFileWriterFactory()
.setCreateIndex(true)
.setUseAsyncIo(true)
.makeBAMWriter(samHeaderTemplate.getFileHeader, true, output)
writeBam(records, outBam)
output
}
private def writeBam(records: Seq[SAMRecord], outBam: SAMFileWriter): Unit = {
try {
for (rec <- records) {
outBam.addAlignment(rec)
}
} finally {
outBam.close()
}
}
@Test def testInputSingleEndOK() = {
sBamRecs1.size shouldBe 7
}
@Test def testInputPairedEndOK() = {
pBamRecs1.size shouldBe 14
}
@Test def testPairedRecords = {
sampleBamInsertSize(BamUtilsTest.pairedEndBam01) shouldBe 50
}
@Test def testContigInsertsize = {
contigInsertSize(BamUtilsTest.pairedEndBam01, "chrQ", 1, 10000) shouldBe Some(50)
contigInsertSize(BamUtilsTest.pairedEndBam01, "chrR", 1, 10000) shouldBe None
}
@Test def testContigInsertsizeContigNotFound = {
intercept[Exception](contigInsertSize(BamUtilsTest.pairedEndBam01, "chrW", 1, 10000))
}
@Test def testSingleRecords = {
sampleBamInsertSize(BamUtilsTest.singleEndBam01) shouldBe 0
}
@Test def testMultiBamInsertsizes = {
val result = sampleBamsInsertSize(List(BamUtilsTest.singleEndBam01, BamUtilsTest.pairedEndBam01))
result shouldEqual Map(
BamUtilsTest.singleEndBam01 -> 0,
BamUtilsTest.pairedEndBam01 -> 50
)
}
@Test def testSampleBamNames = {
sampleBamMap(List(BamUtilsTest.singleEndBam01)) shouldEqual Map(
"sample01" -> BamUtilsTest.singleEndBam01
)
}
}
object BamUtilsTest {
var singleEndBam01: File = File.createTempFile("bamutils", "single01.bam")
singleEndBam01.deleteOnExit()
var pairedEndBam01: File = File.createTempFile("bamutils", "paired01.bam")
pairedEndBam01.deleteOnExit()
}
\ No newline at end of file
@@ -17,7 +17,7 @@ package nl.lumc.sasc.biopet.pipelines.shiva
import nl.lumc.sasc.biopet.core.summary.SummaryQScript
import nl.lumc.sasc.biopet.core.{ PipelineCommand, Reference, SampleLibraryTag }
import nl.lumc.sasc.biopet.pipelines.shiva.svcallers.{ Delly, Breakdancer, Clever, SvCaller }
import nl.lumc.sasc.biopet.pipelines.shiva.svcallers._
import nl.lumc.sasc.biopet.utils.{ BamUtils, Logging }
import nl.lumc.sasc.biopet.utils.config.Configurable
import org.broadinstitute.gatk.queue.QScript
@@ -67,7 +67,7 @@ class ShivaSvCalling(val root: Configurable) extends QScript with SummaryQScript
}
/** Will generate all available variantcallers */
protected def callersList: List[SvCaller] = List(new Breakdancer(this), new Clever(this), new Delly(this))
protected def callersList: List[SvCaller] = List(new Breakdancer(this), new Clever(this), new Delly(this), new Pindel(this))
/** Location of summary file */
def summaryFile = new File(outputDir, "ShivaSvCalling.summary.json")
@@ -13,60 +13,51 @@
* license; For commercial users or users who do not want to follow the AGPL
* license, please contact us to obtain a separate license.
*/
package nl.lumc.sasc.biopet.extensions.pindel
package nl.lumc.sasc.biopet.pipelines.shiva.svcallers
import java.io.File
import java.text.SimpleDateFormat
import java.util.Calendar
import nl.lumc.sasc.biopet.core.BiopetCommandLineFunction
import nl.lumc.sasc.biopet.extensions.pindel._
import nl.lumc.sasc.biopet.utils.config.Configurable
import org.broadinstitute.gatk.utils.commandline.{ Argument, Input, Output }
class PindelCaller(val root: Configurable) extends BiopetCommandLineFunction {
executable = config("exe", default = "pindel", freeVar = false)
/// Pindel is actually a mini pipeline executing binaries from the pindel package
class Pindel(val root: Configurable) extends SvCaller {
val name = "pindel"
/** Default pipeline config */
override def defaults = Map(
"pindelvcf" -> Map(
"rdate" -> new SimpleDateFormat("yyyyMMdd").format(Calendar.getInstance().getTime()),
"compact_output_limit" -> 10
))
def biopetScript() {
for ((sample, bamFile) <- inputBams) {
val pindelDir = new File(outputDir, sample)
val config_file: File = new File(pindelDir, sample + ".pindel.cfg")
val cfg = new PindelConfig(this)
cfg.input = bamFile
cfg.sampleName = sample
cfg.output = config_file
add(cfg)
val pindel = PindelCaller(this, cfg.output, pindelDir)
add(pindel)
// Current date
val today = Calendar.getInstance().getTime()
val todayformat = new SimpleDateFormat("yyyyMMdd")
val pindelVcf = new PindelVCF(this)
pindelVcf.deps ::= pindel.outputFile
pindelVcf.pindelOutputInputHolder = pindel.outputFile
pindelVcf.pindelOutputRoot = Some(new File(pindelDir, "sample"))
pindelVcf.rDate = todayformat.format(today) // officially, we should enter the date of the genome here
pindelVcf.outputVCF = new File(pindelDir, s"${sample}.pindel.vcf")
add(pindelVcf)
}
override def defaultCoreMemory = 5.0
override def defaultThreads = 8
override def versionRegex = """Pindel version:? (.*)""".r
override def versionExitcode = List(1)
override def versionCommand = executable
@Input(doc = "The pindel configuration file")
var input: File = _
@Input(doc = "Fasta reference")
var reference: File = config("reference")
// this is a pointer to where the results files will be stored
// inside this directory, we can expect files named:
// <prefix>_D
// <prefix>_SI
// <prefix>_I
// <prefix>_TR
@Argument(doc = "Work directory")
var workdir: String = _
@Output(doc = "Pindel VCF output")
var output: File = _
var window_size: Option[Int] = config("window_size", default = 5)
override def beforeCmd() {
}
def cmdLine = required(executable) +
"-i " + required(input) +
"-f " + required(reference) +
"-o " + required(output) +
optional("-w", window_size) +
optional("-T", nCoresRequest)
}
object PindelCaller {
def apply(root: Configurable, input: File, output: File): PindelCaller = {
val caller = new PindelCaller(root)
caller.input = input
caller.output = output
caller
}
}
}
\ No newline at end of file
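To enable the new caller in ShivaSvCalling, add "pindel" to the sv_callers list in the pipeline config; a minimal sketch in the Map style used by the tests below (the rdate override is optional, since the Pindel class defaults it to today's date in yyyyMMdd format):

val cfg = Map(
  "sv_callers" -> List("pindel"),
  "pindelvcf" -> Map("rdate" -> "20160120") // optional override, hypothetical value
)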
@@ -19,14 +19,12 @@ import java.io.{ File, FileOutputStream }
import java.nio.file.Paths
import com.google.common.io.Files
import nl.lumc.sasc.biopet.extensions.breakdancer.{ BreakdancerVCF, BreakdancerConfig, BreakdancerCaller }
import nl.lumc.sasc.biopet.extensions.breakdancer.{ BreakdancerCaller, BreakdancerConfig, BreakdancerVCF }
import nl.lumc.sasc.biopet.extensions.clever.CleverCaller
import nl.lumc.sasc.biopet.extensions.delly.DellyCaller
import nl.lumc.sasc.biopet.utils.config.Config
import nl.lumc.sasc.biopet.extensions.Freebayes
import nl.lumc.sasc.biopet.extensions.gatk.CombineVariants
import nl.lumc.sasc.biopet.extensions.tools.VcfFilter
import nl.lumc.sasc.biopet.extensions.pindel.{ PindelVCF, PindelConfig, PindelCaller }
import nl.lumc.sasc.biopet.utils.ConfigUtils
import nl.lumc.sasc.biopet.utils.config.Config
import org.apache.commons.io.FileUtils
import org.broadinstitute.gatk.queue.QSettings
import org.scalatest.Matchers
@@ -57,25 +55,28 @@ class ShivaSvCallingTest extends TestNGSuite with Matchers {
bams <- 0 to 3;
delly <- bool;
clever <- bool;
breakdancer <- bool
) yield Array(bams, delly, clever, breakdancer)).toArray
breakdancer <- bool;
pindel <- bool
) yield Array(bams, delly, clever, breakdancer, pindel)).toArray
}
@Test(dataProvider = "shivaSvCallingOptions")
def testShivaSvCalling(bams: Int,
delly: Boolean,
clever: Boolean,
breakdancer: Boolean) = {
breakdancer: Boolean,
pindel: Boolean) = {
val callers: ListBuffer[String] = ListBuffer()
if (delly) callers.append("delly")
if (clever) callers.append("clever")
if (breakdancer) callers.append("breakdancer")
if (pindel) callers.append("pindel")
val map = Map("sv_callers" -> callers.toList)
val pipeline = initPipeline(map)
pipeline.inputBams = (for (n <- 1 to bams) yield n.toString -> ShivaSvCallingTest.inputTouch("bam_" + n + ".bam")).toMap
val illegalArgumentException = pipeline.inputBams.isEmpty || (!delly && !clever && !breakdancer)
val illegalArgumentException = pipeline.inputBams.isEmpty || (!delly && !clever && !breakdancer && !pindel)
if (illegalArgumentException) intercept[IllegalArgumentException] {
pipeline.init()
@@ -93,10 +94,17 @@ class ShivaSvCallingTest extends TestNGSuite with Matchers {
else assert(!summaryCallers.contains("clever"))
if (breakdancer) assert(summaryCallers.contains("breakdancer"))
else assert(!summaryCallers.contains("breakdancer"))
if (pindel) assert(summaryCallers.contains("pindel"))
else assert(!summaryCallers.contains("pindel"))
pipeline.functions.count(_.isInstanceOf[BreakdancerCaller]) shouldBe (if (breakdancer) bams else 0)
pipeline.functions.count(_.isInstanceOf[BreakdancerConfig]) shouldBe (if (breakdancer) bams else 0)
pipeline.functions.count(_.isInstanceOf[BreakdancerCaller]) shouldBe (if (breakdancer) bams else 0)
pipeline.functions.count(_.isInstanceOf[BreakdancerVCF]) shouldBe (if (breakdancer) bams else 0)
pipeline.functions.count(_.isInstanceOf[PindelConfig]) shouldBe (if (pindel) bams else 0)
pipeline.functions.count(_.isInstanceOf[PindelCaller]) shouldBe (if (pindel) bams else 0)
pipeline.functions.count(_.isInstanceOf[PindelVCF]) shouldBe (if (pindel) bams else 0)
pipeline.functions.count(_.isInstanceOf[CleverCaller]) shouldBe (if (clever) bams else 0)
pipeline.functions.count(_.isInstanceOf[DellyCaller]) shouldBe (if (delly) (bams * 4) else 0)
@@ -208,6 +216,9 @@ object ShivaSvCallingTest {
"tabix" -> Map("exe" -> "test"),
"breakdancerconfig" -> Map("exe" -> "test"),
"breakdancercaller" -> Map("exe" -> "test"),
"pindelconfig" -> Map("exe" -> "test"),
"pindelcaller" -> Map("exe" -> "test"),
"pindelvcf" -> Map("exe" -> "test"),
"clever" -> Map("exe" -> "test"),
"delly" -> Map("exe" -> "test"),
"varscan_jar" -> "test"
/target/
\ No newline at end of file
<!--
Biopet is built on top of GATK Queue for building bioinformatic
pipelines. It is mainly intended to support LUMC SHARK cluster which is running
SGE. But other types of HPC that are supported by GATK Queue (such as PBS)
should also be able to execute Biopet tools and pipelines.
Copyright 2014 Sequencing Analysis Support Core - Leiden University Medical Center
Contact us at: sasc@lumc.nl
A dual licensing mode is applied. The source code within this project that are
not part of GATK Queue is freely available for non-commercial use under an AGPL
license; For commercial users or users who do not want to follow the AGPL
license, please contact us to obtain a separate license.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>Yamsvp</artifactId>
<packaging>jar</packaging>
<parent>
<groupId>nl.lumc.sasc</groupId>
<artifactId>Biopet</artifactId>
<version>0.6.0-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
<inceptionYear>2014</inceptionYear>
<name>Yamsvp</name>
<dependencies>
<dependency>
<groupId>nl.lumc.sasc</groupId>
<artifactId>BiopetFramework</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>nl.lumc.sasc</groupId>
<artifactId>Mapping</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</project>
/**
* Biopet is built on top of GATK Queue for building bioinformatic
* pipelines. It is mainly intended to support LUMC SHARK cluster which is running
* SGE. But other types of HPC that are supported by GATK Queue (such as PBS)
* should also be able to execute Biopet tools and pipelines.
*
* Copyright 2014 Sequencing Analysis Support Core - Leiden University Medical Center
*
* Contact us at: sasc@lumc.nl
*
* A dual licensing mode is applied. The source code within this project that are
* not part of GATK Queue is freely available for non-commercial use under an AGPL
* license; For commercial users or users who do not want to follow the AGPL
* license, please contact us to obtain a separate license.
*/
package nl.lumc.sasc.biopet.extensions.pindel
import java.io.File
import nl.lumc.sasc.biopet.core.{ BiopetQScript, PipelineCommand }
import nl.lumc.sasc.biopet.utils.config.Configurable
import org.broadinstitute.gatk.queue.QScript
/// Pindel is actually a mini pipeline executing binaries from the pindel package
class Pindel(val root: Configurable) extends QScript with BiopetQScript {
def this() = this(null)
@Input(doc = "Input file (bam)")
var input: File = _
@Input(doc = "Reference Fasta file")
var reference: File = _
@Argument(doc = "Work directory")
var workdir: String = _
// @Output(doc = "Pindel VCF output")
// lazy val outputvcf: File = {
// new File(workdir + "/" + input.getName.substring(0, input.getName.lastIndexOf(".bam")) + ".pindel.vcf")
// }
@Output(doc = "Pindel config")
lazy val configfile: File = {
new File(workdir + "/" + input.getName.substring(0, input.getName.lastIndexOf(".bam")) + ".pindel.cfg")
}
@Output(doc = "Pindel raw output")
lazy val outputvcf: File = {
new File(workdir + "/" + input.getName.substring(0, input.getName.lastIndexOf(".bam")) + ".pindel.vcf")
}
override def init() {
}
def biopetScript() {
// read config and set all parameters for the pipeline
logger.info("Starting Pindel configuration")
val cfg = PindelConfig(this, input, this.configfile)
outputFiles += ("pindel_cfg" -> cfg.output)
add(cfg)
val output: File = this.outputvcf
val pindel = PindelCaller(this, cfg.output, output)
add(pindel)
outputFiles += ("pindel_tsv" -> pindel.output)
// val output_vcf: File = this.outputvcf
// convert this tsv to vcf using the python script
}
// private def swapExtension(inputFile: String) = inputFile.substring(0, inputFile.lastIndexOf(".bam")) + ".pindel.tsv"
}
object Pindel extends PipelineCommand {
def apply(root: Configurable, input: File, reference: File, runDir: String): Pindel = {
val pindel = new Pindel(root)
pindel.input = input
pindel.reference = reference
pindel.workdir = runDir
// run the following for activating the pipeline steps
pindel.init()
pindel.biopetScript()
pindel
}
}
\ No newline at end of file
/**
* Biopet is built on top of GATK Queue for building bioinformatic
* pipelines. It is mainly intended to support LUMC SHARK cluster which is running
* SGE. But other types of HPC that are supported by GATK Queue (such as PBS)
* should also be able to execute Biopet tools and pipelines.
*
* Copyright 2014 Sequencing Analysis Support Core - Leiden University Medical Center
*
* Contact us at: sasc@lumc.nl
*
* A dual licensing mode is applied. The source code within this project that are
* not part of GATK Queue is freely available for non-commercial use under an AGPL
* license; For commercial users or users who do not want to follow the AGPL
* license, please contact us to obtain a separate license.
*/
/*
* Structural variation calling
*/
package nl.lumc.sasc.biopet.pipelines.yamsvp
import java.io.File
import nl.lumc.sasc.biopet.core.{MultiSampleQScript, PipelineCommand}
import nl.lumc.sasc.biopet.extensions.Ln
import nl.lumc.sasc.biopet.extensions.breakdancer.Breakdancer
import nl.lumc.sasc.biopet.extensions.clever.CleverCaller
import nl.lumc.sasc.biopet.extensions.igvtools.IGVToolsCount
import nl.lumc.sasc.biopet.extensions.sambamba.{SambambaMarkdup, SambambaMerge}
import nl.lumc.sasc.biopet.pipelines.shiva.Delly
import nl.lumc.sasc.biopet.utils.config.Configurable
//import nl.lumc.sasc.biopet.extensions.pindel.Pindel
import nl.lumc.sasc.biopet.pipelines.bammetrics.BamMetrics
import nl.lumc.sasc.biopet.pipelines.mapping.Mapping
import org.broadinstitute.gatk.queue.QScript
import org.broadinstitute.gatk.queue.engine.JobRunInfo
import org.broadinstitute.gatk.queue.function._
class Yamsvp(val root: Configurable) extends QScript with MultiSampleQScript {
qscript =>
def this() = this(null)
def summaryFile = null
def summaryFiles = Map()
def summarySettings = Map()
var reference: File = config("reference")
def makeSample(id: String) = new Sample(id)
class Sample(sampleId: String) extends AbstractSample(sampleId) {
def summaryFiles = Map()
def summaryStats = Map()
val alignmentDir: String = sampleDir + "alignment/"
val svcallingDir: String = sampleDir + "svcalls/"
def makeLibrary(id: String) = new Library(id)
class Library(libraryId: String) extends AbstractLibrary(libraryId) {
// val runDir: String = alignmentDir + "run_" + libraryId + "/"
def summaryFiles = Map()
def summaryStats = Map()
val mapping = new Mapping(qscript)
mapping.libId = Some(libraryId)
mapping.sampleId = Some(sampleId)
protected def addJobs(): Unit = {
mapping.input_R1 = config("R1")
mapping.input_R2 = config("R2")
mapping.outputDir = libDir
mapping.init()
mapping.biopetScript()
qscript.addAll(mapping.functions)
}
}
protected def addJobs(): Unit = {
val libraryBamfiles = libraries.map(_._2.mapping.finalBamFile).toList
val bamFile: File = if (libraryBamfiles.size == 1) {
val alignmentlink = Ln(qscript, libraryBamfiles.head,
alignmentDir + sampleId + ".merged.bam", relative = true)
alignmentlink.isIntermediate = true
add(alignmentlink)
alignmentlink.output
} else if (libraryBamfiles.size > 1) {
val mergeSamFiles = new SambambaMerge(qscript)
mergeSamFiles.input = libraryBamfiles
mergeSamFiles.output = sampleDir + sampleId + ".merged.bam"
mergeSamFiles.isIntermediate = true
add(mergeSamFiles)
mergeSamFiles.output
} else null
val bamMarkDup = SambambaMarkdup(qscript, bamFile)
add(bamMarkDup)
addAll(BamMetrics(qscript, bamMarkDup.output, alignmentDir + "metrics" + File.separator).functions)
// create an IGV TDF file
val tdfCount = IGVToolsCount(qscript, bamMarkDup.output, config("genome_name", default = "hg19"))
add(tdfCount)
/// bamfile will be used as input for the SV callers. First run Clever
// val cleverVCF : File = sampleDir + "/" + sampleID + ".clever.vcf"
val cleverDir = svcallingDir + sampleId + ".clever/"
val clever = CleverCaller(qscript, bamMarkDup.output, qscript.reference, svcallingDir, cleverDir)
add(clever)
val clever_vcf = Ln(qscript, clever.outputvcf, svcallingDir + sampleId + ".clever.vcf", relative = true)
add(clever_vcf)
val breakdancerDir = svcallingDir + sampleId + ".breakdancer/"
val breakdancer = Breakdancer(qscript, bamMarkDup.output, qscript.reference, breakdancerDir)
addAll(breakdancer.functions)
val bd_vcf = Ln(qscript, breakdancer.outputvcf, svcallingDir + sampleId + ".breakdancer.vcf", relative = true)
add(bd_vcf)
val dellyDir = svcallingDir + sampleId + ".delly/"
val delly = Delly(qscript, bamMarkDup.output, dellyDir)
addAll(delly.functions)
val delly_vcf = Ln(qscript, delly.outputvcf, svcallingDir + sampleId + ".delly.vcf", relative = true)
add(delly_vcf)
// for pindel we should use per library config collected into one config file
// val pindelDir = svcallingDir + sampleID + ".pindel/"
// val pindel = Pindel(qscript, analysisBam, this.reference, pindelDir)
// sampleOutput.vcf += ("pindel" -> List(pindel.outputvcf))
// addAll(pindel.functions)
//
// val pindel_vcf = Ln(qscript, pindel.outputvcf, svcallingDir + sampleID + ".pindel.vcf", relative = true)
// add(pindel_vcf)
//
}
}
def addMultiSampleJobs() = {}
def init() {
}
def biopetScript() {
logger.info("Starting YAM SV Pipeline")
addSamplesJobs()
}
override def onExecutionDone(jobs: Map[QFunction, JobRunInfo], success: Boolean) {
logger.info("YAM SV Pipeline has run .......................")
}
}
object Yamsvp extends PipelineCommand
\ No newline at end of file
#
# Biopet is built on top of GATK Queue for building bioinformatic
# pipelines. It is mainly intended to support LUMC SHARK cluster which is running
# SGE. But other types of HPC that are supported by GATK Queue (such as PBS)
# should also be able to execute Biopet tools and pipelines.
#
# Copyright 2014 Sequencing Analysis Support Core - Leiden University Medical Center
#
# Contact us at: sasc@lumc.nl
#
# A dual licensing mode is applied. The source code within this project that are
# not part of GATK Queue is freely available for non-commercial use under an AGPL
# license; For commercial users or users who do not want to follow the AGPL
# license, please contact us to obtain a separate license.
#
# Set root logger level to DEBUG and its only appender to A1.
log4j.rootLogger=ERROR, A1
# A1 is set to be a ConsoleAppender.
log4j.appender.A1=org.apache.log4j.ConsoleAppender
# A1 uses PatternLayout.
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
log4j.appender.A1.layout.ConversionPattern=%-5p [%d] [%C{1}] - %m%n
\ No newline at end of file