Commit 263ca3bb
Authored Feb 16, 2017 by Peter van 't Hof

Added more run data

Parent: 414f406d
Changes: 6 files
biopet-core/src/main/scala/nl/lumc/sasc/biopet/core/BiopetQScript.scala
@@ -88,7 +88,8 @@ trait BiopetQScript extends Configurable with GatkLogging { qscript: QScript =>
      case q: MultiSampleQScript if q.onlySamples.nonEmpty && !q.samples.forall(x => q.onlySamples.contains(x._1)) =>
        logger.info("Write report is skipped because sample flag is used")
      case _ => reportClass.foreach { report =>
-       add(report)
+       //add(report)
+       //FIXEME: Disabled this till implementation of SummaryDb is done
      }
    }
@@ -108,6 +109,8 @@ trait BiopetQScript extends Configurable with GatkLogging { qscript: QScript =>
      if (count % 500 == 0)
        logger.info(s"Preprocessing done for ${count} jobs out of ${functions.length} total")
    }
    logger.info(s"Preprocessing done for ${functions.length} jobs")

    val logDir = new File(outputDir, ".log" + File.separator + qSettings.runName.toLowerCase)

    if (outputDir.getParentFile.canWrite || (outputDir.exists && outputDir.canWrite))
biopet-core/src/main/scala/nl/lumc/sasc/biopet/core/summary/SummaryQScript.scala
@@ -14,14 +14,14 @@
 */
package nl.lumc.sasc.biopet.core.summary

import java.io.{ File, PrintWriter }
+import java.sql.Date

import nl.lumc.sasc.biopet.core._
import nl.lumc.sasc.biopet.core.extensions.{ CheckChecksum, Md5sum }
import nl.lumc.sasc.biopet.utils.summary.db.SummaryDb
import org.broadinstitute.gatk.queue.QScript
import org.broadinstitute.gatk.queue.engine.JobRunInfo
import org.broadinstitute.gatk.queue.function.QFunction
+import nl.lumc.sasc.biopet.LastCommitHash

import scala.collection.mutable
import scala.concurrent.Await
@@ -124,7 +124,8 @@ trait SummaryQScript extends BiopetQScript { qscript: QScript =>
      case q: BiopetQScript => q.outputDir
      case _ => throw new IllegalStateException("Root should be a BiopetQscript")
    }
-   val id = Await.result(db.createRun(summaryName, dir.getAbsolutePath), Duration.Inf)
+   val id = Await.result(db.createRun(summaryName, dir.getAbsolutePath,
+     nl.lumc.sasc.biopet.Version, LastCommitHash, new Date(System.currentTimeMillis())), Duration.Inf)
    runIdFile.getParentFile.mkdir()
    val writer = new PrintWriter(runIdFile)
    writer.println(id)
biopet-core/src/main/scala/nl/lumc/sasc/biopet/core/summary/WriteSummary.scala
@@ -253,8 +253,8 @@ class WriteSummary(val parent: SummaryQScript) extends InProcessFunction with Co
    }).foldRight(pipelineMap)((a, b) => ConfigUtils.mergeMaps(a._1, b, a._2))

    val combinedMap = //(for (qscript <- qscript.summaryQScripts) yield {
      // ConfigUtils.fileToConfigMap(qscript.summaryFile)
      // }).foldRight(jobsMap)((a, b) => ConfigUtils.mergeMaps(a, b)) ++
      jobsMap ++ Map(
        "meta" -> Map(
          "last_commit_hash" -> LastCommitHash,
          "pipeline_version" -> nl.lumc.sasc.biopet.Version,
biopet-tools/src/main/scala/nl/lumc/sasc/biopet/tools/SummaryToSqlite.scala
deleted 100644 → 0
package nl.lumc.sasc.biopet.tools

import java.io.File

import nl.lumc.sasc.biopet.utils.summary.db.SummaryDb
import nl.lumc.sasc.biopet.utils.{ ConfigUtils, ToolCommand }
import slick.driver.H2Driver.api._

import scala.concurrent.Await
import scala.concurrent.duration.Duration

/**
 * Created by pjvanthof on 26/01/2017.
 */
object SummaryToSqlite extends ToolCommand {

  case class Args(inputJson: File = null,
                  outputSqlite: File = null,
                  force: Boolean = false) extends AbstractArgs

  class OptParser extends AbstractOptParser {
    opt[File]('I', "inputJson") required () maxOccurs 1 valueName "<file>" action { (x, c) =>
      c.copy(inputJson = x)
    } text "Input json file"
    opt[File]('o', "outputHdf5") required () maxOccurs 1 valueName "<file>" action { (x, c) =>
      c.copy(outputSqlite = x)
    } text "Output hdf5 file"
    opt[Unit]('f', "force") action { (x, c) =>
      c.copy(force = true)
    } text "If database already exist it will be moved"
  }

  def main(args: Array[String]): Unit = {
    val argsParser = new OptParser
    val cmdArgs = argsParser.parse(args, Args()) getOrElse (throw new IllegalArgumentException)

    logger.info("Start")

    val jsonMap = ConfigUtils.fileToConfigMap(cmdArgs.inputJson)

    if (cmdArgs.outputSqlite.exists()) {
      if (cmdArgs.force) {
        logger.warn("Deleting old database")
        cmdArgs.outputSqlite.delete()
      } else throw new IllegalArgumentException(s"Db already exist: ${cmdArgs.outputSqlite}")
    }

    val db = Database.forURL(s"jdbc:sqlite:${cmdArgs.outputSqlite.getAbsolutePath}",
      driver = "org.sqlite.JDBC")

    val summary = new SummaryDb(db)
    summary.createTables

    val runId = Await.result(summary.createRun("runName", "kdfhla"), Duration.Inf)

    val runs = Await.result(summary.getRuns(), Duration.Inf)

    List("1", "2", "3", "4").foreach(x =>
      Await.result(summary.createSample(x, runId, Some("""{"father": "blabla"}""")), Duration.Inf))

    println(Await.result(summary.getSamples(), Duration.Inf))
    println(Await.result(summary.getSampleTags(1), Duration.Inf))

    Await.result(summary.createLibrary("lib1", runId, 1), Duration.Inf)
    println(Await.result(summary.getLibraries(), Duration.Inf))

    val pipelineId = Await.result(summary.forceCreatePipeline("pipelineName", runId), Duration.Inf)
    println(Await.result(summary.getPipelines(), Duration.Inf))

    Await.result(summary.createStat(runId, pipelineId, None, None, None, """{"bla": "bla"}"""), Duration.Inf)
    println(Await.result(summary.getStats(runId = Some(runId), pipelineId = Some(pipelineId), sampleId = Option(None)), Duration.Inf))
    println(Await.result(summary.getStat(0, 0), Duration.Inf))

    db.close()

    logger.info("Done")
  }
}
biopet-utils/src/main/scala/nl/lumc/sasc/biopet/utils/summary/db/Schema.scala
package nl.lumc.sasc.biopet.utils.summary.db

+import java.sql.Date

import slick.driver.H2Driver.api._

/**
@@ -7,13 +9,17 @@ import slick.driver.H2Driver.api._
 */
object Schema {

-  case class Run(id: Int, name: String, outputDir: String)
+  case class Run(id: Int, name: String, outputDir: String, version: String, commitHash: String, creationDate: Date)

  class Runs(tag: Tag) extends Table[Run](tag, "Runs") {
    def id = column[Int]("id", O.PrimaryKey)
    def runName = column[String]("runName")
    def outputDir = column[String]("outputDir")
+   def version = column[String]("version")
+   def commitHash = column[String]("commitHash")
+   def creationDate = column[Date]("creationDate")

-   def * = (id, runName, outputDir) <> (Run.tupled, Run.unapply)
+   def * = (id, runName, outputDir, version, commitHash, creationDate) <> (Run.tupled, Run.unapply)
  }

  val runs = TableQuery[Runs]
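The new Run columns can be exercised directly through the Slick constructs this file already defines (Run, Runs, forceInsert, TableQuery). The following is a minimal sketch and not part of the commit; the in-memory H2 JDBC URL and the inserted values are illustrative only.

    import java.sql.Date
    import slick.driver.H2Driver.api._
    import scala.concurrent.Await
    import scala.concurrent.duration.Duration
    import nl.lumc.sasc.biopet.utils.summary.db.Schema._

    // Illustrative in-memory H2 database; the pipelines open their own database elsewhere
    val db = Database.forURL("jdbc:h2:mem:schema_example;DB_CLOSE_DELAY=-1", driver = "org.h2.Driver")

    // Create the Runs table and insert one row carrying the new version/commitHash/creationDate fields
    Await.result(db.run(runs.schema.create), Duration.Inf)
    Await.result(db.run(runs.forceInsert(
      Run(0, "exampleRun", "/tmp/exampleRun", "0.9.0", "0123abcd", new Date(System.currentTimeMillis())))),
      Duration.Inf)

    // Read the rows back; each row maps to the extended Run case class
    val allRuns = Await.result(db.run(runs.result), Duration.Inf)
    allRuns.foreach(r => println(s"${r.id} ${r.name} ${r.version} ${r.commitHash} ${r.creationDate}"))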
biopet-utils/src/main/scala/nl/lumc/sasc/biopet/utils/summary/db/SummaryDb.scala
@@ -6,9 +6,9 @@ import slick.driver.H2Driver.api._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{ Await, Future }
import java.io.{ Closeable, File }
import java.sql.Date

/**
 * This class interface wityh a summary database
@@ -34,9 +34,10 @@ class SummaryDb(db: Database) extends Closeable {
  }

  /** This method will create a new run and return the runId */
- def createRun(runName: String, outputDir: String): Future[Int] = {
+ def createRun(runName: String, outputDir: String, version: String,
+               commitHash: String, creationDate: Date): Future[Int] = {
    val id = Await.result(db.run(runs.size.result), Duration.Inf)
-   db.run(runs.forceInsert(Run(id, runName, outputDir))).map(_ => id)
+   db.run(runs.forceInsert(Run(id, runName, outputDir, version, commitHash, creationDate))).map(_ => id)
  }

  /** This will return all runs that match the critiria given */
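For reference, a minimal usage sketch of the extended createRun defined above; it is not part of the diff. The H2 in-memory URL and the literal run values are assumptions, while SummaryDb, createTables, nl.lumc.sasc.biopet.Version and LastCommitHash are taken from the files touched by this commit.

    import java.sql.Date
    import scala.concurrent.Await
    import scala.concurrent.duration.Duration
    import slick.driver.H2Driver.api._
    import nl.lumc.sasc.biopet.LastCommitHash
    import nl.lumc.sasc.biopet.utils.summary.db.SummaryDb

    // Illustrative database handle; SummaryQScript and the removed SummaryToSqlite tool open theirs the same way
    val db = Database.forURL("jdbc:h2:mem:rundb_example;DB_CLOSE_DELAY=-1", driver = "org.h2.Driver")
    val summary = new SummaryDb(db)
    summary.createTables

    // The extended signature records which pipeline version and commit produced the run, and when
    val runId: Int = Await.result(
      summary.createRun(
        "exampleRun",                          // runName
        "/tmp/exampleRun",                     // outputDir
        nl.lumc.sasc.biopet.Version,           // pipeline version, as passed in SummaryQScript
        LastCommitHash,                        // git commit hash, as passed in SummaryQScript
        new Date(System.currentTimeMillis())), // creationDate
      Duration.Inf)

    println(s"created run $runId")
    db.close()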