Commits on Source (6)
.git
.gitignore
.jar_opt
.classpath
......
version: 2
group_defaults:
approve_by_comment:
approve_regex: '^:\+1:'
enabled: true
groups:
reviewers:
required: 1
conditions:
branches:
- master
teams:
- dsde-pipelines-developers
users:
- nh13
- tfenne
- yfarjoun
dist: trusty
language: java
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
......@@ -8,6 +9,7 @@ cache:
- $HOME/.m2
jdk:
- oraclejdk8
- openjdk8
before_install:
- wget -q -O - https://dl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
- sudo apt-get -qq update
......@@ -15,3 +17,6 @@ before_install:
script: ./gradlew jacocoTestReport
after_success:
- ./gradlew coveralls
- if [ "$TRAVIS_BRANCH" == "master" ]; then
./gradlew uploadArchives;
fi
......@@ -10,23 +10,19 @@ RUN apt-get update && \
apt-get autoremove -y
# Assumes Dockerfile lives in root of the git repo. Pull source files into container
COPY build.xml /usr/picard/build.xml
COPY src /usr/picard/src
COPY lib /usr/picard/lib
COPY / /usr/picard/
WORKDIR /usr/picard
# Clone out htsjdk. First turn off git ssl verification
RUN git config --global http.sslVerify false && git clone https://github.com/samtools/htsjdk.git
# Build the distribution jar, clean up everything else
RUN ant clean all && \
mv dist/picard.jar picard.jar && \
mv src/scripts/picard/docker_helper.sh docker_helper.sh && \
ant clean && \
rm -rf htsjdk && \
RUN ./gradlew shadowJar && \
mv build/libs/picard.jar picard.jar && \
mv src/main/resources/picard/docker_helper.sh docker_helper.sh && \
./gradlew clean && \
rm -rf src && \
rm -rf lib && \
rm build.xml
rm -rf gradle && \
rm -rf .git && \
rm gradlew && \
rm build.gradle
RUN mkdir /usr/working
WORKDIR /usr/working
......
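For orientation, a minimal sketch of building and running the image this Dockerfile produces; the image tag and data paths are illustrative:
```bash
# Build from the repo root, where the Dockerfile lives
docker build -t picard .

# The jar ends up at /usr/picard/picard.jar and the working
# directory is /usr/working, so mount input data there
docker run -v "$PWD":/usr/working picard \
    java -jar /usr/picard/picard.jar SortSam \
    I=input.bam O=sorted.bam SORT_ORDER=coordinate
```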
MIT License
Copyright (c) 2017 Broad Institute
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
[![Coverage Status](https://coveralls.io/repos/github/broadinstitute/picard/badge.svg?branch=master)](https://coveralls.io/github/broadinstitute/picard?branch=master)
[![Build Status](https://travis-ci.org/broadinstitute/picard.svg?branch=master)](https://travis-ci.org/broadinstitute/picard)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://github.com/broadinstitute/picard/blob/master/LICENSE.txt)
A set of Java command line tools for manipulating high-throughput sequencing (HTS) data and formats.
......@@ -13,7 +14,7 @@ As of version 2.0.1 (Nov. 2015) Picard requires Java 1.8 (jdk8u66). The last ver
* First, clone the repo:
```
git clone git@github.com:broadinstitute/picard.git
git clone https://github.com/broadinstitute/picard.git
cd picard/
```
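After cloning, the executable jar can be built with the Gradle wrapper; a minimal sketch (the `shadowJar` task and output path match the build file in this changeset):
```bash
./gradlew shadowJar
java -jar build/libs/picard.jar    # with no arguments, prints the list of available tools
```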
......@@ -82,9 +83,38 @@ During development in Picard, it is sometimes necessary to build locally against
#### Releasing Picard
Full instructions on how to create a new release of Picard are [here](https://github.com/broadinstitute/picard/wiki/How-to-release-Picard)
Full instructions on how to create a new release of
Picard are [here](https://github.com/broadinstitute/picard/wiki/How-to-release-Picard)
----
#### Path providers
Picard has limited support for reading from Path providers.
Currently only Google's API is supported, and only a few tools support it.
To run with this support, you first need to build the cloudJar target with Gradle:
```bash
./gradlew cloudJar
```
Then run Picard as follows:
```bash
java -jar build/libs/picardcloud.jar <Picard arguments starting from program>
```
For example:
```bash
java -jar build/libs/picardcloud.jar CrosscheckFingerprints \
I=gs://sample1.vcf \
I=gs://sample2.vcf \
CROSSCHECK_BY=FILE \
H=Haplotype_db.txt \
O=crosscheck.out
```
Alternatively, you can run the tool via [GATK](https://software.broadinstitute.org/gatk/download/), which bundles the Google Cloud
jar and should thus "Just Work".
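For instance, a hypothetical GATK-side invocation of the same tool (argument spellings follow GATK's POSIX-style conversion and can vary by GATK version):
```bash
gatk CrosscheckFingerprints \
    --INPUT gs://sample1.vcf \
    --INPUT gs://sample2.vcf \
    --CROSSCHECK_BY FILE \
    --HAPLOTYPE_MAP Haplotype_db.txt \
    --OUTPUT crosscheck.out
```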
#### GA4GH API
It's also possible to build a version of Picard that supports reading from the
GA4GH API, e.g. Google Genomics:
......
......@@ -27,7 +27,7 @@ repositories {
mavenLocal()
mavenCentral()
maven {
url "https://artifactory.broadinstitute.org/artifactory/libs-snapshot/" //for htsjdk snapshots
url "https://broadinstitute.jfrog.io/broadinstitute/libs-snapshot/" //for htsjdk snapshots
}
}
......@@ -47,14 +47,48 @@ jacoco {
toolVersion = "0.7.5.201505241946"
}
final htsjdkVersion = System.getProperty('htsjdk.version', '2.8.0')
final htsjdkVersion = System.getProperty('htsjdk.version', '2.14.3')
// We use a custom shaded build of the NIO library to avoid a regression in the authentication layer.
// GATK does the same, see https://github.com/broadinstitute/gatk/issues/3591
final googleNio = 'org.broadinstitute:google-cloud-nio-GATK4-custom-patch:0.20.4-alpha-GCS-RETRY-FIX:shaded'
// Get the jdk files we need to run javadoc. We need to use these during compile, testCompile,
// test execution, and picardDoc generation, but we don't want them as part of the runtime
// classpath and we don't want to redistribute them in the uber jar.
final javadocJDKFiles = files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs())
configurations {
cloudConfiguration {
extendsFrom runtime
dependencies {
cloudConfiguration(googleNio)
}
}
}
dependencies {
compile('com.intel.gkl:gkl:0.8.2') {
exclude module: 'htsjdk'
}
compile 'com.google.guava:guava:15.0'
compile 'com.github.samtools:htsjdk:' + htsjdkVersion
//tools dependency for doclet requires sdk devel
compile(files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs()))
compile 'org.broadinstitute:barclay:2.0.0'
compileOnly googleNio
// javadoc utilities; compile/test only to prevent redistribution of sdk jars
compileOnly(javadocJDKFiles)
testCompile(javadocJDKFiles)
testCompile 'org.testng:testng:6.9.10'
testCompile 'org.apache.commons:commons-lang3:3.6'
}
configurations.all {
resolutionStrategy {
// force the htsjdk version so we don't get a different one transitively
force 'com.github.samtools:htsjdk:' + htsjdkVersion
}
}
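Since the htsjdk version is read from a system property with a default, a locally built snapshot can be swapped in at build time; a sketch, assuming the snapshot has been installed to mavenLocal():
```bash
# Build against a specific htsjdk version instead of the default
./gradlew shadowJar -Dhtsjdk.version=2.14.3-SNAPSHOT
```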
sourceCompatibility = 1.8
......@@ -69,9 +103,29 @@ group = 'com.github.broadinstitute'
defaultTasks 'all'
task all(dependsOn: ['jar', 'distZip', 'documentAll', 'shadowJar', 'currentJar'])
task all(dependsOn: ['jar', 'distZip', 'javadoc', 'shadowJar', 'barclayShadowJar', 'currentJar'])
jar {
// Source file names for the picard command line properties file. We select and include only one of
// these two files in each jar, renamed to "picardCmdLine.properties", depending on which parser we
// want enabled.
final String legacySourcePropertyFile = 'legacyParserProperties.properties'
final String barclaySourcePropertyFile = 'barclayParserProperties.properties'
// Target name/location for the picard command line properties file; one of the above source
// files will be included at this path/location for runtime access
final String picardTargetPropertiesPath = 'picard'
final String picardTargetPropertyFile = 'picardCmdLine.properties'
sourceSets {
// no need to include these templates in the jar files; the build selects the correct one to control which parser is enabled
main {
resources {
exclude ('properties.templates/**')
}
}
}
tasks.withType(Jar){
manifest {
attributes 'Main-Class': 'picard.cmdline.PicardCommandLine',
'Implementation-Title': 'Picard',
......@@ -79,11 +133,59 @@ jar {
'Implementation-Version': version
}
}
// This is a hack to disable the java 8 default javadoc lint until we fix the html formatting
if (JavaVersion.current().isJava8Compatible()) {
jar {
from('src/main/resources/properties.templates') {
// for the default jar, we want the properties file that enables the picard parser
include legacySourcePropertyFile
into picardTargetPropertiesPath
rename { String fileName ->
fileName.replace(legacySourcePropertyFile, picardTargetPropertyFile)
}
}
}
tasks.withType(Javadoc) {
// do this for all javadoc tasks, including picardDoc
options.addStringOption('Xdoclint:none')
}
javadoc {
options.addStringOption('Xdoclint:none', '-quiet')
}
// Generate Picard Online Doc
task picardDoc(type: Javadoc, dependsOn: ['cleanPicardDoc', classes]) {
final File picardDocDir = new File("build/docs/picarddoc")
doFirst {
// make sure the output folder exists or can be created
if (!picardDocDir.exists() && !picardDocDir.mkdirs()) {
throw new GradleException(String.format("Failure creating folder (%s) for picardDocDir doc output in task (%s)",
picardDocDir.getAbsolutePath(),
it.name));
}
copy {
from('src/main/resources/picard/helpTemplates')
include 'picardDoc.css'
into picardDocDir
}
}
source = sourceSets.main.allJava
// The picardDoc process instantiates any documented feature classes, so to run it we need the entire
// runtime classpath, as well as jdk javadoc files such as tools.jar, where com.sun.javadoc lives.
classpath = sourceSets.main.runtimeClasspath + javadocJDKFiles
options.docletpath = classpath.asType(List)
options.doclet = "picard.util.help.PicardHelpDoclet"
outputs.dir(picardDocDir)
options.destinationDirectory(picardDocDir)
options.addStringOption("settings-dir", "src/main/resources/picard/helpTemplates/");
options.addStringOption("output-file-extension", "html")
options.addStringOption("absolute-version", getVersion())
options.addStringOption("build-timestamp", new Date().format("dd-mm-yyyy hh:mm:ss"))
options.addStringOption("verbose")
}
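A sketch of invoking the task above; the output lands in the hard-coded picardDocDir:
```bash
./gradlew picardDoc
ls build/docs/picarddoc    # generated HTML plus the copied picardDoc.css
```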
task currentJar(type: Copy){
......@@ -94,6 +196,57 @@ task currentJar(type: Copy){
shadowJar {
finalizedBy currentJar
from('src/main/resources/properties.templates') {
// for the default jar, we want the properties file that enables the picard parser
include legacySourcePropertyFile
into picardTargetPropertiesPath
rename { String fileName ->
fileName.replace(legacySourcePropertyFile, picardTargetPropertyFile)
}
}
}
task cloudJar(type: com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar) {
configurations = [project.configurations.cloudConfiguration]
from project.sourceSets.main.output
archiveName 'picardcloud.jar'
}
// Create picardBarclay.jar, which is identical to picard.jar, but contains a .properties
// file that tells Picard to use the Barclay command line parser instead of the Picard
// command line parser.
//
task barclayShadowJar(type: com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar) {
configurations = [project.configurations.runtime]
from project.sourceSets.main.output
from('src/main/resources/properties.templates') {
// for the Barclay jar, we want the properties file that enables the Barclay parser
include barclaySourcePropertyFile
into picardTargetPropertiesPath
rename { String fileName ->
fileName.replace(barclaySourcePropertyFile, picardTargetPropertyFile)
}
}
archiveName 'picardBarclay.jar'
}
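A sketch of building and smoke-testing the Barclay jar defined above; the tool and argument spellings are illustrative of Barclay's POSIX-style syntax:
```bash
./gradlew barclayShadowJar
# Barclay syntax uses --NAME value instead of the legacy NAME=value
java -jar build/libs/picardBarclay.jar SortSam \
    --INPUT input.bam --OUTPUT sorted.bam --SORT_ORDER coordinate
```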
// Run the tests using the legacy parser only. Assumes that test code is written using
// legacy command line parser syntax.
task legacyTest(type: Test)
// Run the tests using the Barclay command line parser (useLegacyParser=false), which requires
// conversion of test command lines from Picard-style command line syntax to Barclay-style syntax.
task barclayTest(type: Test) {
systemProperty 'picard.convertCommandLine', 'true'
systemProperty 'picard.useLegacyParser', 'false'
}
// Run tests using both the legacy and barclay command line parsers.
test {
dependsOn barclayTest
}
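The resulting tasks can be run individually or together; for example:
```bash
./gradlew legacyTest     # legacy parser syntax only
./gradlew barclayTest    # converted command lines, Barclay parser
./gradlew test           # legacy run plus barclayTest via dependsOn
```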
tasks.withType(Test) {
......@@ -146,99 +299,12 @@ tasks.withType(Test) {
}
}
ext.htmlDir = new File("build/docs/html")
ext.htmlDirInc = new File(htmlDir, "_includes")
ext.commandClasses = ["picard.sam.AddCommentsToBam", "picard.sam.AddOrReplaceReadGroups", "picard.util.BaitDesigner", "picard.fastq.BamToBfq",
"picard.sam.BamIndexStats", "picard.util.BedToIntervalList", "picard.sam.BuildBamIndex", "picard.analysis.directed.CalculateHsMetrics",
"picard.sam.CalculateReadGroupChecksum", "picard.sam.CleanSam", "picard.analysis.CollectAlignmentSummaryMetrics",
"picard.analysis.CollectBaseDistributionByCycle", "picard.analysis.CollectGcBiasMetrics", "picard.illumina.quality.CollectHiSeqXPfFailMetrics",
"picard.analysis.directed.CollectHsMetrics", "picard.illumina.CollectIlluminaBasecallingMetrics", "picard.illumina.CollectIlluminaLaneMetrics",
"picard.analysis.CollectInsertSizeMetrics", "picard.analysis.CollectJumpingLibraryMetrics", "picard.analysis.CollectMultipleMetrics",
"picard.analysis.CollectOxoGMetrics", "picard.analysis.CollectQualityYieldMetrics", "picard.analysis.CollectRawWgsMetrics",
"picard.analysis.directed.CollectTargetedPcrMetrics", "picard.analysis.CollectRnaSeqMetrics", "picard.analysis.CollectRrbsMetrics",
"picard.analysis.artifacts.CollectSequencingArtifactMetrics", "picard.vcf.CollectVariantCallingMetrics", "picard.analysis.CollectWgsMetrics",
"picard.analysis.CollectWgsMetricsFromQuerySorted", "picard.analysis.CollectWgsMetricsFromSampledSites",
"picard.analysis.CollectWgsMetricsWithNonZeroCoverage", "picard.analysis.CompareMetrics", "picard.sam.CompareSAMs",
"picard.analysis.artifacts.ConvertSequencingArtifactToOxoG", "picard.sam.CreateSequenceDictionary", "picard.sam.DownsampleSam",
"picard.illumina.ExtractIlluminaBarcodes", "picard.sam.markduplicates.EstimateLibraryComplexity", "picard.sam.FastqToSam", "picard.util.FifoBuffer",
"picard.vcf.MendelianViolations.FindMendelianViolations",
"picard.sam.FilterSamReads", "picard.vcf.filter.FilterVcf", "picard.sam.FixMateInformation", "picard.sam.GatherBamFiles", "picard.vcf.GatherVcfs",
"picard.vcf.GenotypeConcordance", "picard.illumina.IlluminaBasecallsToFastq", "picard.illumina.IlluminaBasecallsToSam", "picard.illumina.CheckIlluminaDirectory",
"picard.sam.CheckTerminatorBlock", "picard.util.IntervalListTools", "picard.util.LiftOverIntervalList", "picard.vcf.LiftoverVcf", "picard.vcf.MakeSitesOnlyVcf",
"picard.sam.markduplicates.MarkDuplicates", "picard.sam.markduplicates.MarkDuplicatesWithMateCigar", "picard.analysis.MeanQualityByCycle",
"picard.sam.MergeBamAlignment", "picard.sam.MergeSamFiles", "picard.vcf.MergeVcfs", "picard.reference.NormalizeFasta", "picard.sam.PositionBasedDownsampleSam",
"picard.reference.ExtractSequences", "picard.analysis.QualityScoreDistribution", "picard.vcf.RenameSampleInVcf", "picard.sam.ReorderSam",
"picard.sam.ReplaceSamHeader", "picard.sam.RevertSam", "picard.sam.RevertOriginalBaseQualitiesAndAddMateCigar", "picard.sam.SamFormatConverter",
"picard.sam.SamToFastq", "picard.util.ScatterIntervalsByNs", "picard.sam.SetNmMdAndUqTags",
"picard.sam.SortSam", "picard.vcf.SortVcf", "picard.sam.SplitSamByLibrary", "picard.sam.markduplicates.UmiAwareMarkDuplicatesWithMateCigar",
"picard.vcf.UpdateVcfSequenceDictionary", "picard.vcf.VcfFormatConverter", "picard.illumina.MarkIlluminaAdapters", "picard.vcf.SplitVcfs",
"picard.sam.ValidateSamFile", "picard.sam.ViewSam", "picard.vcf.VcfToIntervalList"]
//generate documentation
task documentAll(dependsOn: ['documentCommands', 'createMetricsDoc', 'documentStandardOptions']){
doFirst{
htmlDirInc.mkdirs()
}
}
task documentCommands {
def previousDocTask = null
def usageFile = new File(htmlDirInc, "command-line-usage.html")
def sidebarFile = new File(htmlDirInc, "command-line-sidebar.html")
commandClasses.each { mainClass ->
task "document_${mainClass}"(type: JavaExec) {
main = 'picard.cmdline.CreateHtmlDocForProgram'
classpath = sourceSets.main.runtimeClasspath
args mainClass
def outputFile = new File(htmlDirInc, mainClass.substring(mainClass.lastIndexOf(".") + 1) + ".html")
doFirst {
htmlDirInc.mkdirs()
standardOutput = new FileOutputStream(outputFile)
}
outputs.file outputFile
if (previousDocTask != null) delegate.dependsOn previousDocTask
previousDocTask = delegate
documentCommands.dependsOn(delegate)
doLast {
usageFile.append("{% include ${mainClass.substring(mainClass.lastIndexOf(".") + 1) + ".html"} %}")
usageFile.append(System.getProperty("line.separator"))
sidebarFile.append("<li><a href=\"command-line-overview.html#${mainClass.substring(mainClass.lastIndexOf(".") + 1)}\">${mainClass.substring(mainClass.lastIndexOf(".") + 1)}</a>")
sidebarFile.append(System.getProperty("line.separator"))
}
}
}
outputs.dir htmlDirInc
}
task documentStandardOptions(type: JavaExec) {
main = 'picard.cmdline.CreateHtmlDocForStandardOptions'
classpath = sourceSets.main.runtimeClasspath
def standardOptionsFile = new File(htmlDirInc, "standard-options.html")
doFirst{
htmlDirInc.mkdirs()
standardOutput = new FileOutputStream(standardOptionsFile)
}
outputs.file standardOptionsFile
}
task createMetricsDoc(dependsOn: classes, type: Javadoc) << {
source = sourceSets.main.allJava
classpath = sourceSets.main.runtimeClasspath
destinationDir = htmlDirInc
options.doclet = 'picard.util.MetricsDoclet'
options.docletpath = sourceSets.main.runtimeClasspath.asType(List)
}
//end generate documentation
task wrapper(type: Wrapper) {
description = "Regenerate the gradle wrapper"
gradleVersion = '3.1'
}
task javadocJar(type: Jar, dependsOn: documentAll) {
task javadocJar(type: Jar) {
classifier = 'javadoc'
from 'build/docs/javadoc'
}
......@@ -280,7 +346,7 @@ uploadArchives {
authentication(userName: project.findProperty("sonatypeUsername"), password: project.findProperty("sonatypePassword"))
}
snapshotRepository(url: "https://artifactory.broadinstitute.org/artifactory/libs-snapshot-local/") {
snapshotRepository(url: "https://broadinstitute.jfrog.io/broadinstitute/libs-snapshot-local/") {
authentication(userName: System.env.ARTIFACTORY_USERNAME, password: System.env.ARTIFACTORY_PASSWORD)
}
......@@ -318,13 +384,20 @@ uploadArchives {
}
}
ext.htmlDir = new File("build/docs/html")
//update static web docs
task copyJavadoc(dependsOn: 'javadoc', type: Copy) {
from 'build/docs/javadoc'
into "$htmlDir/javadoc"
}
task updateGhPages(dependsOn: ['copyJavadoc', 'documentAll']){
task copyPicardDoc(dependsOn: 'picardDoc', type: Copy){
from 'build/docs/picarddoc'
into "$htmlDir/picarddoc"
}
task updateGhPages(dependsOn: ['copyJavadoc', 'copyPicardDoc']){
outputs.dir htmlDir
}
......@@ -336,6 +409,6 @@ githubPages {
deleteExistingFiles = false
pages {
from htmlDir
into '.'
into 'newdocs'
}
}
picard-tools (2.18.2+dfsg-1) UNRELEASED; urgency=medium
* New upstream version
* Backports-friendly debhelper 11
* Bump versioned Build-Depends on libhtsjdk-java and default-jdk
-- Andreas Tille <tille@debian.org> Tue, 17 Apr 2018 22:45:38 +0200
picard-tools (2.8.1+dfsg-4) unstable; urgency=medium
* Team upload.
......
......@@ -7,14 +7,14 @@ Uploaders: Charles Plessy <plessy@debian.org>,
Steffen Moeller <moeller@debian.org>
Section: science
Priority: optional
Build-Depends: default-jdk (>= 2:1.8),
debhelper (>= 11),
Build-Depends: default-jdk (>= 2:1.9~),
debhelper (>= 11~),
javahelper,
gradle-debian-helper,
maven-repo-helper,
libguava-java (>= 15.0),
# htsjdk and picard-tools are released nearly together
libhtsjdk-java (>= 2.8~),
libhtsjdk-java (>= 2.14~),
# required for tests:
testng (>= 6.9.10),
r-base-core,
......
......@@ -4,7 +4,7 @@ Forwarded: not-needed
Last-Updated: 2016-07-07
--- a/build.gradle
+++ b/build.gradle
@@ -12,13 +12,7 @@
@@ -12,13 +12,7 @@ plugins {
id "java"
id 'maven'
id 'signing'
......@@ -18,37 +18,23 @@ Last-Updated: 2016-07-07
}
mainClassName = "picard.cmdline.PicardCommandLine"
@@ -31,38 +25,31 @@
@@ -31,6 +25,7 @@ repositories {
}
}
-jacocoTestReport {
- dependsOn test
- group = "Reporting"
- description = "Generate Jacoco coverage reports after running tests."
- additionalSourceDirs = files(sourceSets.main.allJava.srcDirs)
-
- reports {
- xml.enabled = true // coveralls plugin depends on xml format report
- html.enabled = true
- }
-}
-
-jacoco {
- toolVersion = "0.7.5.201505241946"
-}
-
final htsjdkVersion = System.getProperty('htsjdk.version', '2.8.0')
dependencies {
compile 'com.google.guava:guava:15.0'
compile 'com.github.samtools:htsjdk:' + htsjdkVersion
//tools dependency for doclet requires sdk devel
- compile(files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs()))
+ //compile(files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs()))
testCompile 'org.testng:testng:6.9.10'
+/*
jacocoTestReport {
dependsOn test
group = "Reporting"
@@ -46,6 +41,7 @@ jacocoTestReport {
jacoco {
toolVersion = "0.7.5.201505241946"
}
+*/
final htsjdkVersion = System.getProperty('htsjdk.version', '2.14.3')
@@ -94,9 +90,18 @@ configurations.all {
sourceCompatibility = 1.8
targetCompatibility = 1.8
......@@ -69,35 +55,19 @@ Last-Updated: 2016-07-07
logger.info("build for version:" + version)
group = 'com.github.broadinstitute'
@@ -176,7 +163,7 @@
//generate documentation
@@ -397,6 +402,7 @@ task copyPicardDoc(dependsOn: 'picardDoc
into "$htmlDir/picarddoc"
}
-task documentAll(dependsOn: ['documentCommands', 'createMetricsDoc', 'documentStandardOptions']){
+task documentAll(dependsOn: ['documentCommands', 'documentStandardOptions']){
doFirst{
htmlDirInc.mkdirs()
+/*
task updateGhPages(dependsOn: ['copyJavadoc', 'copyPicardDoc']){
outputs.dir htmlDir
}
@@ -412,3 +418,4 @@ githubPages {
into 'newdocs'
}
@@ -323,19 +310,3 @@
from 'build/docs/javadoc'
into "$htmlDir/javadoc"
}
-
-task updateGhPages(dependsOn: ['copyJavadoc', 'documentAll']){
- outputs.dir htmlDir
-}
-
-updateGhPages.finalizedBy publishGhPages
-
-githubPages {
- repoUri = 'git@github.com:broadinstitute/picard.git'
- targetBranch = 'gh-pages'
- deleteExistingFiles = false
- pages {
- from htmlDir
- into '.'
- }
-}
+*/
--- /dev/null
+++ b/gradle.properties
@@ -0,0 +1 @@
......
......@@ -2,37 +2,30 @@ Description: do not use shadowjar
Author: Sascha Steinbiss <satta@debian.org>
--- a/build.gradle
+++ b/build.gradle
@@ -56,7 +56,7 @@
@@ -108,7 +108,7 @@ group = 'com.github.broadinstitute'
defaultTasks 'all'
-task all(dependsOn: ['jar', 'distZip', 'documentAll', 'shadowJar', 'currentJar'])
+task all(dependsOn: ['jar', 'distZip', 'documentAll'])
-task all(dependsOn: ['jar', 'distZip', 'javadoc', 'shadowJar', 'barclayShadowJar', 'currentJar'])
+task all(dependsOn: ['jar', 'distZip', 'javadoc'])
jar {
manifest {
@@ -73,15 +73,15 @@
}
// Source file names for the picard command line properties file. We select and include only one of
// these two files in each jar, renamed to "picardCmdLine.properties", depending on which parser we
@@ -193,6 +193,7 @@ task picardDoc(type: Javadoc, dependsOn:
options.addStringOption("verbose")
}
-task currentJar(type: Copy){
- from shadowJar
- into new File(buildDir, "libs")
- rename { string -> "picard.jar"}
-}
+/*
task currentJar(type: Copy){
from shadowJar
into new File(buildDir, "libs")
@@ -210,8 +211,7 @@ shadowJar {
}
}
}
-
-
-shadowJar {
- finalizedBy currentJar
-}
+//task currentJar(type: Copy){
+// from shadowJar
+// into new File(buildDir, "libs")
+// rename { string -> "picard.jar"}
+//}
+
+//shadowJar {
+// finalizedBy currentJar
+//}
+*/
tasks.withType(Test) {
outputs.upToDateWhen { false } // tests will always rerun
task cloudJar(type: com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar) {
configurations = [project.configurations.cloudConfiguration]
......@@ -2,7 +2,7 @@ Description: fix data provider requirements
Author: Sascha Steinbiss <satta@debian.org>
--- a/src/test/java/picard/analysis/directed/CollectHsMetricsTest.java
+++ b/src/test/java/picard/analysis/directed/CollectHsMetricsTest.java
@@ -21,22 +21,22 @@
@@ -21,22 +21,22 @@ public class CollectHsMetricsTest extend
@DataProvider(name = "collectHsMetricsDataProvider")
public Object[][] targetedIntervalDataProvider() {
......@@ -16,20 +16,47 @@ Author: Sascha Steinbiss <satta@debian.org>
return new Object[][] {
// two reads, each has 100 bases. bases in one read are medium quality (20), in the other read poor quality (10).
// test that we exclude half of the bases
- {TEST_DIR + "/lowbaseq.sam", intervals, 1, 10, true, 2, 200, 0.5, 0.0, 0.50, 0.0, 1000},
+ {TEST_DIR.getAbsolutePath() + "/lowbaseq.sam", intervals, 1, 10, true, 2, 200, 0.5, 0.0, 0.50, 0.0, 1000},
- {TEST_DIR + "/lowbaseq.sam", intervals, 1, 10, true, 2, 200, 0.5, 0.0, 0.50, 0.0, 1, 1000},
+ {TEST_DIR.getAbsolutePath() + "/lowbaseq.sam", intervals, 1, 10, true, 2, 200, 0.5, 0.0, 0.50, 0.0, 1, 1000},
// test that read 2 (with mapping quality 1) is filtered out with minimum mapping quality 2
- {TEST_DIR + "/lowmapq.sam", intervals, 2, 0, true, 2, 202, 0, 0.0, 0.505, 0.0, 1000},
+ {TEST_DIR.getAbsolutePath() + "/lowmapq.sam", intervals, 2, 0, true, 2, 202, 0.0, 0.0, 0.505, 0.0, 1000},
- {TEST_DIR + "/lowmapq.sam", intervals, 2, 0, true, 2, 202, 0, 0.0, 0.505, 0.0, 1, 1000},
+ {TEST_DIR.getAbsolutePath() + "/lowmapq.sam", intervals, 2, 0, true, 2, 202, 0, 0.0, 0.505, 0.0, 1, 1000},
// test that we clip overlapping bases
- {TEST_DIR + "/overlapping.sam", intervals, 0, 0, true, 2, 202, 0, 0.5, 0.505, 0.505, 1000},
+ {TEST_DIR.getAbsolutePath() + "/overlapping.sam", intervals, 0, 0, true, 2, 202, 0.0, 0.5, 0.505, 0.505, 1000},
- {TEST_DIR + "/overlapping.sam", intervals, 0, 0, true, 2, 202, 0, 0.5, 0.505, 0, 1, 1000},
+ {TEST_DIR.getAbsolutePath() + "/overlapping.sam", intervals, 0, 0, true, 2, 202, 0, 0.5, 0.505, 0, 1, 1000},
// test that we do not clip overlapping bases
- {TEST_DIR + "/overlapping.sam", intervals, 0, 0, false, 2, 202, 0, 0.0, 0.505, 0.505, 1000},
+ {TEST_DIR.getAbsolutePath() + "/overlapping.sam", intervals, 0, 0, false, 2, 202, 0.0, 0.0, 0.505, 0.505, 1000},
- {TEST_DIR + "/overlapping.sam", intervals, 0, 0, false, 2, 202, 0, 0.0, 0.505, 0.505, 2, 1000},
+ {TEST_DIR.getAbsolutePath() + "/overlapping.sam", intervals, 0, 0, false, 2, 202, 0, 0.0, 0.505, 0.505, 2, 1000},
// A read 10 base pairs long. two intervals: one maps identically to the read, other does not overlap at all
- {TEST_DIR + "/single-short-read.sam", twoSmallIntervals, 20, 20, true, 1, 10, 0.0, 0.0, 0.5, 0.0, 1000 }
+ {TEST_DIR.getAbsolutePath() + "/single-short-read.sam", twoSmallIntervals, 20, 20, true, 1, 10, 0.0, 0.0, 0.5, 0.0, 1000 }
- {TEST_DIR + "/single-short-read.sam", twoSmallIntervals, 20, 20, true, 1, 10, 0.0, 0.0, 0.5, 0.0, 1, 1000 }
+ {TEST_DIR.getAbsolutePath() + "/single-short-read.sam", twoSmallIntervals, 20, 20, true, 1, 10, 0.0, 0.0, 0.5, 0.0, 1, 1000 }
};
}
@@ -56,7 +56,7 @@ public class CollectHsMetricsTest extend
final long maxTargetCoverage,
final int sampleSize) throws IOException {
- final File outfile = File.createTempFile("CollectHsMetrics", ".hs_metrics", TEST_DIR);
+ final File outfile = File.createTempFile("CollectHsMetrics", ".hs_metrics", TEST_DIR.getAbsolutePath());
outfile.deleteOnExit();
final String[] args = new String[] {
@@ -99,14 +99,14 @@ public class CollectHsMetricsTest extend
* Test that the depth histogram is [10,10,0,...,0]
*/
- final String input = TEST_DIR + "/single-short-read.sam";
- final String targetIntervals = TEST_DIR + "/two-small.interval_list";
+ final String input = TEST_DIR.getAbsolutePath() + "/single-short-read.sam";
+ final String targetIntervals = TEST_DIR.getAbsolutePath() + "/two-small.interval_list";
final int minimumMappingQuality = 20;
final int minimumBaseQuality = 20;
final boolean clipOverlappingReads = true;
final int sampleSize = 10;
- final File outfile = File.createTempFile("testCoverageHistogram", ".hs_metrics", TEST_DIR);
+ final File outfile = File.createTempFile("testCoverageHistogram", ".hs_metrics", TEST_DIR.getAbsolutePath());
outfile.deleteOnExit();
final String[] args = new String[] {
Author: Olivier Sallou
Last-Update: 2017-11-01 08:42:38
Description: Fix testng version
--- a/build.gradle
+++ b/build.gradle
@@ -32,7 +32,7 @@
compile 'com.github.samtools:htsjdk:' + htsjdkVersion
//tools dependency for doclet requires sdk devel
//compile(files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs()))
@@ -76,7 +76,7 @@ dependencies {
compileOnly(javadocJDKFiles)
testCompile(javadocJDKFiles)
- testCompile 'org.testng:testng:6.9.10'
+ testCompile 'org.testng:testng:debian'
testCompile 'org.apache.commons:commons-lang3:3.6'
}
sourceCompatibility = 1.8
#Mon Oct 24 16:22:03 EDT 2016
#Mon Aug 14 16:27:51 EDT 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-3.1-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-3.1-all.zip
rootProject.name = "picard"
include 'picard_gradle_v2'
/*
* The MIT License
*
* Copyright (c) 2016 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package picard.analysis;
import htsjdk.samtools.metrics.MetricsFile;
import htsjdk.samtools.reference.ReferenceSequence;
import htsjdk.samtools.util.AbstractLocusInfo;
import htsjdk.samtools.util.AbstractRecordAndOffset;
import htsjdk.samtools.util.Histogram;
import htsjdk.samtools.util.IntervalList;
import htsjdk.samtools.util.SequenceUtil;
import picard.filter.CountingFilter;
import picard.filter.CountingPairedFilter;
/**
* Class for collecting data on reference coverage, base qualities and excluded bases from one AbstractLocusInfo object for
* CollectWgsMetrics.
* <p>
* The shared code for forming the result of CollectWgsMetrics is abstracted into this class.
* Classes that extend this collector implement their logic in the addInfo() method.
* @author Mariia_Zueva@epam.com, EPAM Systems, Inc. <www.epam.com>
*/
public abstract class AbstractWgsMetricsCollector<T extends AbstractRecordAndOffset> {
/**
* The source CollectWgsMetrics object
*/
final CollectWgsMetrics collectWgsMetrics;
/** Count of sites with a given depth of coverage. Includes all but quality 2 bases.
* We draw depths from this histogram when we calculate the theoretical het sensitivity.
*/
protected final long[] unfilteredDepthHistogramArray;
/** Count of bases observed with a given base quality. Includes all but quality 2 bases.
* We draw bases from this histogram when we calculate the theoretical het sensitivity.
*/
protected final long[] unfilteredBaseQHistogramArray;
/**
* Count of sites with a given depth of coverage.
* Excludes bases with quality below MINIMUM_BASE_QUALITY (default 20).
*/
protected final long[] highQualityDepthHistogramArray;
/**
* Number of aligned bases that were filtered out because they were of low base quality (default is < 20).
*/
long basesExcludedByBaseq = 0;
/**
* Number of aligned bases that were filtered out because they were the second observation from an insert with overlapping reads.
*/
long basesExcludedByOverlap = 0;
/**
* Number of aligned bases that were filtered out because they would have raised coverage above the capped value (default cap = 250x).
*/
long basesExcludedByCapping = 0;
/**
* Positions with coverage exceeding this value are treated as if they had coverage at this value
*/
protected final int coverageCap;
protected final IntervalList intervals;
/**
* Indicates that processing will stop after the number of genomic bases specified by STOP_AFTER.
*/
private final boolean usingStopAfter;
/**
* The number of processed genomic bases
*/
protected long counter = 0;
/**
* Creates a collector and initializes the inner data structures
*
* @param collectWgsMetrics CollectWgsMetrics, that creates this collector
* @param coverageCap coverage cap
*/
AbstractWgsMetricsCollector(CollectWgsMetrics collectWgsMetrics, final int coverageCap, final IntervalList intervals) {
if (coverageCap <= 0) {
throw new IllegalArgumentException("Coverage cap must be positive.");
}
this.collectWgsMetrics = collectWgsMetrics;
unfilteredDepthHistogramArray = new long[coverageCap + 1];
highQualityDepthHistogramArray = new long[coverageCap + 1];
unfilteredBaseQHistogramArray = new long[Byte.MAX_VALUE];
this.coverageCap = coverageCap;
this.intervals = intervals;
this.usingStopAfter = collectWgsMetrics.STOP_AFTER > 0;
}
/**
* Accumulates the data from AbstractLocusInfo in inner structures
* @param info {@link htsjdk.samtools.util.AbstractLocusInfo} with reads aligned to the reference position
* @param ref {@link htsjdk.samtools.reference.ReferenceSequence}
* @param referenceBaseN true if the current reference base represents a no call
*/
public abstract void addInfo(final AbstractLocusInfo<T> info, final ReferenceSequence ref, boolean referenceBaseN);
/**
* Adds collected metrics and depth histogram to file
* @param file MetricsFile for result of collector's work
* @param dupeFilter counting filter for duplicate reads
* @param mapqFilter counting filter for mapping quality
* @param pairFilter counting filter for reads without a mapped mate pair
*/
public void addToMetricsFile(final MetricsFile<CollectWgsMetrics.WgsMetrics, Integer> file,
final boolean includeBQHistogram,
final CountingFilter dupeFilter,
final CountingFilter mapqFilter,
final CountingPairedFilter pairFilter) {
final CollectWgsMetrics.WgsMetrics
metrics = getMetrics(dupeFilter, mapqFilter, pairFilter);
// add them to the file
file.addMetric(metrics);
file.addHistogram(getHighQualityDepthHistogram());
if (includeBQHistogram) addBaseQHistogram(file);
}
protected void addBaseQHistogram(final MetricsFile<CollectWgsMetrics.WgsMetrics, Integer> file) {
file.addHistogram(getUnfilteredBaseQHistogram());
}
protected Histogram<Integer> getHighQualityDepthHistogram() {
return getHistogram(highQualityDepthHistogramArray, "coverage", "high_quality_coverage_count");
}
protected Histogram<Integer> getUnfilteredDepthHistogram() {
return getHistogram(unfilteredDepthHistogramArray, "coverage", "unfiltered_coverage_count");
}
protected Histogram<Integer> getUnfilteredBaseQHistogram() {
return getHistogram(unfilteredBaseQHistogramArray, "baseq", "unfiltered_baseq_count");
}
protected Histogram<Integer> getHistogram(final long[] array, final String binLabel, final String valueLabel) {
final Histogram<Integer> histogram = new Histogram<>(binLabel, valueLabel);
for (int i = 0; i < array.length; ++i) {
histogram.increment(i, array[i]);
}
return histogram;
}
/**
* Creates CollectWgsMetrics.WgsMetrics - the object holding the result of CollectWgsMetrics
*
* @param dupeFilter counting filter for duplicate reads
* @param mapqFilter counting filter for mapping quality
* @param pairFilter counting filter for reads without a mapped mate pair
* @return CollectWgsMetrics.WgsMetrics with set fields
*/
protected CollectWgsMetrics.WgsMetrics getMetrics(final CountingFilter dupeFilter,
final CountingFilter mapqFilter,
final CountingPairedFilter pairFilter) {
return collectWgsMetrics.generateWgsMetrics(
this.intervals,
getHighQualityDepthHistogram(),
getUnfilteredDepthHistogram(),
collectWgsMetrics.getBasesExcludedBy(mapqFilter),
collectWgsMetrics.getBasesExcludedBy(dupeFilter),
collectWgsMetrics.getBasesExcludedBy(pairFilter),
basesExcludedByBaseq,
basesExcludedByOverlap,
basesExcludedByCapping,
coverageCap,
getUnfilteredBaseQHistogram(),
collectWgsMetrics.SAMPLE_SIZE
);
}
/**
* @return true if the number of processed loci exceeded the threshold, otherwise false
*/
boolean isTimeToStop(final long processedLoci) {
return usingStopAfter && processedLoci > collectWgsMetrics.STOP_AFTER - 1;
}
/**
* Sets the counter to the current number of processed loci. The counter must be updated
* from outside, since no-call reference positions are skipped outside of the collector.
*
* @param counter number of processed loci
*/
public void setCounter(long counter) {
this.counter = counter;
}
/**
* Checks if reference base at given position is unknown.
*
* @param position to check the base
* @param ref reference sequence
* @return true if reference base at position represents a no call, otherwise false
*/
boolean isReferenceBaseN(final int position, final ReferenceSequence ref) {
final byte base = ref.getBases()[position - 1];
return SequenceUtil.isNoCall(base);
}
}
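Since this collector backs CollectWgsMetrics, a typical invocation that exercises it looks like the following; file names are illustrative:
```bash
java -jar picard.jar CollectWgsMetrics \
    I=input.bam \
    O=wgs_metrics.txt \
    R=reference.fasta
```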
......@@ -51,7 +51,7 @@ public class AlignmentSummaryMetrics extends MultilevelMetrics {
/** The number of PF reads where PF is defined as passing Illumina's filter. */
public long PF_READS;
/** The percentage of reads that are PF (PF_READS / TOTAL_READS) */
/** The fraction of reads that are PF (PF_READS / TOTAL_READS) */
public double PCT_PF_READS;
/**
......@@ -108,7 +108,7 @@ public class AlignmentSummaryMetrics extends MultilevelMetrics {
public double PF_MISMATCH_RATE;
/**
* The percentage of bases that mismatch the reference in PF HQ aligned reads.
* The fraction of bases that mismatch the reference in PF HQ aligned reads.
*/
public double PF_HQ_ERROR_RATE;
......@@ -131,11 +131,22 @@ public class AlignmentSummaryMetrics extends MultilevelMetrics {
public long READS_ALIGNED_IN_PAIRS;
/**
* The percentage of reads whose mate pair was also aligned to the reference.
* The fraction of reads whose mate pair was also aligned to the reference.
* READS_ALIGNED_IN_PAIRS / PF_READS_ALIGNED
*/
public double PCT_READS_ALIGNED_IN_PAIRS;
/**
* The number of (primary) aligned reads that are **not** "properly" aligned in pairs (as per SAM flag 0x2).
*/
public long PF_READS_IMPROPER_PAIRS;
/**
* The fraction of (primary) reads that are *not* "properly" aligned in pairs (as per SAM flag 0x2).
* PF_READS_IMPROPER_PAIRS / PF_READS_ALIGNED
*/
public double PCT_PF_READS_IMPROPER_PAIRS;
/**
* The number of instrument cycles in which 80% or more of base calls were no-calls.
*/
......@@ -148,13 +159,13 @@ public class AlignmentSummaryMetrics extends MultilevelMetrics {
public double STRAND_BALANCE;
/**
* The percentage of reads that map outside of a maximum insert size (usually 100kb) or that have
* The fraction of reads that map outside of a maximum insert size (usually 100kb) or that have
* the two ends mapping to different chromosomes.
*/
public double PCT_CHIMERAS;
/**
* The percentage of PF reads that are unaligned and match to a known adapter sequence right from the
* The fraction of PF reads that are unaligned and match to a known adapter sequence right from the
* start of the read.
*/
public double PCT_ADAPTER;
......
......@@ -53,7 +53,7 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
private final boolean isBisulfiteSequenced;
//The minimum mapping quality a read has to meet in order to be considered high quality
private final int MAPPING_QUALITY_THRESOLD = 20;
private final int MAPPING_QUALITY_THRESHOLD = 20;
//The minimum quality a base has to meet in order to be considered hq_20
private final static int BASE_QUALITY_THRESHOLD = 20;
......@@ -110,14 +110,12 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
if (rec.getReadPairedFlag()) {
if (rec.getFirstOfPairFlag()) {
firstOfPairCollector.addRecord(rec, ref);
}
else {
} else {
secondOfPairCollector.addRecord(rec, ref);
}
pairCollector.addRecord(rec, ref);
}
else {
} else {
unpairedCollector.addRecord(rec, ref);
}
}
......@@ -155,7 +153,7 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
*/
private class IndividualAlignmentSummaryMetricsCollector {
private long numPositiveStrand = 0;
private final Histogram<Integer> readLengthHistogram = new Histogram<Integer>();
private final Histogram<Integer> readLengthHistogram = new Histogram<>();
private AlignmentSummaryMetrics metrics;
private long chimeras;
private long chimerasDenominator;
......@@ -164,9 +162,9 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
private long nonBisulfiteAlignedBases = 0;
private long hqNonBisulfiteAlignedBases = 0;
private final Histogram<Long> mismatchHistogram = new Histogram<Long>();
private final Histogram<Long> hqMismatchHistogram = new Histogram<Long>();
private final Histogram<Integer> badCycleHistogram = new Histogram<Integer>();
private final Histogram<Long> mismatchHistogram = new Histogram<>();
private final Histogram<Long> hqMismatchHistogram = new Histogram<>();
private final Histogram<Integer> badCycleHistogram = new Histogram<>();
public IndividualAlignmentSummaryMetricsCollector(final AlignmentSummaryMetrics.Category pairingCategory,
final String sample,
......@@ -201,7 +199,7 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
metrics.BAD_CYCLES = 0;
for (final Histogram.Bin<Integer> cycleBin : badCycleHistogram.values()) {
final double badCyclePercentage = cycleBin.getValue() / metrics.TOTAL_READS;
if (badCyclePercentage >= .8) {
if (badCyclePercentage >= 0.8) {
metrics.BAD_CYCLES++;
}
}
......@@ -209,6 +207,7 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
if(doRefMetrics) {
if (metrics.PF_READS > 0) metrics.PCT_PF_READS_ALIGNED = (double) metrics.PF_READS_ALIGNED / (double) metrics.PF_READS;
if (metrics.PF_READS_ALIGNED > 0) metrics.PCT_READS_ALIGNED_IN_PAIRS = (double) metrics.READS_ALIGNED_IN_PAIRS / (double) metrics.PF_READS_ALIGNED;
if (metrics.PF_READS_ALIGNED > 0) metrics.PCT_PF_READS_IMPROPER_PAIRS = (double) metrics.PF_READS_IMPROPER_PAIRS / (double) metrics.PF_READS_ALIGNED;
if (metrics.PF_READS_ALIGNED > 0) metrics.STRAND_BALANCE = numPositiveStrand / (double) metrics.PF_READS_ALIGNED;
if (this.chimerasDenominator > 0) metrics.PCT_CHIMERAS = this.chimeras / (double) this.chimerasDenominator;
......@@ -239,16 +238,16 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
if (adapterUtility.isAdapterSequence(readBases)) {
this.adapterReads++;
}
}
else if(doRefMetrics) {
} else if(doRefMetrics) {
metrics.PF_READS_ALIGNED++;
if (record.getReadPairedFlag() && !record.getProperPairFlag()) metrics.PF_READS_IMPROPER_PAIRS++;
if (!record.getReadNegativeStrandFlag()) numPositiveStrand++;
if (record.getReadPairedFlag() && !record.getMateUnmappedFlag()) {
metrics.READS_ALIGNED_IN_PAIRS++;
// Check that both ends have mapq > minimum
final Integer mateMq = record.getIntegerAttribute(SAMTag.MQ.toString());
if (mateMq == null || mateMq >= MAPPING_QUALITY_THRESOLD && record.getMappingQuality() >= MAPPING_QUALITY_THRESOLD) {
if (mateMq == null || mateMq >= MAPPING_QUALITY_THRESHOLD && record.getMappingQuality() >= MAPPING_QUALITY_THRESHOLD) {
++this.chimerasDenominator;
// With both reads mapped we can see if this pair is chimeric
......@@ -256,10 +255,9 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
++this.chimeras;
}
}
}
else { // fragment reads or read pairs with one end that maps
} else { // fragment reads or read pairs with one end that maps
// Consider chimeras that occur *within* the read using the SA tag
if (record.getMappingQuality() >= MAPPING_QUALITY_THRESOLD) {
if (record.getMappingQuality() >= MAPPING_QUALITY_THRESHOLD) {
++this.chimerasDenominator;
if (record.getAttribute(SAMTag.SA.toString()) != null) ++this.chimeras;
}
......@@ -299,18 +297,10 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
for (int i=0; i<length && refIndex+i<refLength; ++i) {
final int readBaseIndex = readIndex + i;
boolean mismatch = !SequenceUtil.basesEqual(readBases[readBaseIndex], refBases[refIndex + i]);
boolean bisulfiteBase = false;
if (mismatch && isBisulfiteSequenced &&
record.getReadNegativeStrandFlag() &&
(refBases[refIndex + i] == 'G' || refBases[refIndex + i] == 'g') &&
(readBases[readBaseIndex] == 'A' || readBases[readBaseIndex] == 'a')
|| ((!record.getReadNegativeStrandFlag()) &&
(refBases[refIndex + i] == 'C' || refBases[refIndex + i] == 'c') &&
(readBases[readBaseIndex] == 'T') || readBases[readBaseIndex] == 't')) {
final boolean bisulfiteMatch = isBisulfiteSequenced && SequenceUtil.bisulfiteBasesEqual(record.getReadNegativeStrandFlag(), readBases[readBaseIndex], refBases[refIndex + i]);
bisulfiteBase = true;
mismatch = false;
}
final boolean bisulfiteBase = mismatch && bisulfiteMatch;
mismatch = mismatch && !bisulfiteMatch;
if (mismatch) mismatchCount++;
......@@ -348,7 +338,7 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
private boolean isHighQualityMapping(final SAMRecord record) {
return !record.getReadFailsVendorQualityCheckFlag() &&
record.getMappingQuality() >= MAPPING_QUALITY_THRESOLD;
record.getMappingQuality() >= MAPPING_QUALITY_THRESHOLD;
}
public AlignmentSummaryMetrics getMetrics() {
......
......@@ -33,10 +33,12 @@ import htsjdk.samtools.reference.ReferenceSequence;
import htsjdk.samtools.util.CollectionUtil;
import htsjdk.samtools.util.IOUtil;
import htsjdk.samtools.util.Log;
import picard.cmdline.CommandLineProgramProperties;
import picard.cmdline.Option;
import org.broadinstitute.barclay.argparser.Argument;
import org.broadinstitute.barclay.argparser.CommandLineProgramProperties;
import org.broadinstitute.barclay.help.DocumentedFeature;
import picard.cmdline.StandardOptionDefinitions;
import picard.cmdline.programgroups.Metrics;
import picard.cmdline.argumentcollections.ReferenceArgumentCollection;
import picard.cmdline.programgroups.DiagnosticsAndQCProgramGroup;
import java.io.File;
import java.util.EnumSet;
......@@ -73,10 +75,11 @@ import java.util.Set;
* @author Doug Voet (dvoet at broadinstitute dot org)
*/
@CommandLineProgramProperties(
usage = CollectAlignmentSummaryMetrics.USAGE_SUMMARY + CollectAlignmentSummaryMetrics.USAGE_DETAILS,
usageShort = CollectAlignmentSummaryMetrics.USAGE_SUMMARY,
programGroup = Metrics.class
summary = CollectAlignmentSummaryMetrics.USAGE_SUMMARY + CollectAlignmentSummaryMetrics.USAGE_DETAILS,
oneLineSummary = CollectAlignmentSummaryMetrics.USAGE_SUMMARY,
programGroup = DiagnosticsAndQCProgramGroup.class
)
@DocumentedFeature
public class CollectAlignmentSummaryMetrics extends SinglePassSamProgram {
static final String USAGE_SUMMARY = "<b>Produces a summary of alignment metrics from a SAM or BAM file.</b> ";
static final String USAGE_DETAILS = "This tool takes a SAM/BAM file input and produces metrics detailing the quality of the read " +
......@@ -101,25 +104,21 @@ public class CollectAlignmentSummaryMetrics extends SinglePassSamProgram {
private static final Log log = Log.getInstance(CollectAlignmentSummaryMetrics.class);
@Option(doc="Paired-end reads above this insert size will be considered chimeric along with inter-chromosomal pairs.")
@Argument(doc="Paired-end reads above this insert size will be considered chimeric along with inter-chromosomal pairs.")
public int MAX_INSERT_SIZE = ChimeraUtil.DEFAULT_INSERT_SIZE_LIMIT;
@Option(doc="Paired-end reads that do not have this expected orientation will be considered chimeric.")
@Argument(doc="Paired-end reads that do not have this expected orientation will be considered chimeric.")
public Set<PairOrientation> EXPECTED_PAIR_ORIENTATIONS = EnumSet.copyOf(ChimeraUtil.DEFAULT_EXPECTED_ORIENTATIONS);
@Option(doc="List of adapter sequences to use when processing the alignment metrics.")
@Argument(doc="List of adapter sequences to use when processing the alignment metrics.")
public List<String> ADAPTER_SEQUENCE = AdapterUtility.DEFAULT_ADAPTER_SEQUENCE;
@Option(shortName="LEVEL", doc="The level(s) at which to accumulate metrics.")
@Argument(shortName="LEVEL", doc="The level(s) at which to accumulate metrics.")
public Set<MetricAccumulationLevel> METRIC_ACCUMULATION_LEVEL = CollectionUtil.makeSet(MetricAccumulationLevel.ALL_READS);
@Option(shortName="BS", doc="Whether the SAM or BAM file consists of bisulfite sequenced reads.")
@Argument(shortName="BS", doc="Whether the SAM or BAM file consists of bisulfite sequenced reads.")
public boolean IS_BISULFITE_SEQUENCED = false;
//overridden to make it visible on the commandline and to change the doc.
@Option(shortName = StandardOptionDefinitions.REFERENCE_SHORT_NAME, doc = "Reference sequence file. Note that while this argument isn't required, without it only a small subset of the metrics will be calculated. Note also that if a reference sequence is provided, it must be accompanied by a sequence dictionary.", optional = true, overridable = true)
public File REFERENCE_SEQUENCE = Defaults.REFERENCE_FASTA;
private AlignmentSummaryMetricsCollector collector;
/** Required main method implementation. */
......@@ -155,4 +154,23 @@ public class CollectAlignmentSummaryMetrics extends SinglePassSamProgram {
file.write(OUTPUT);
}
//overridden to make it visible on the command line and to change the doc.
@Override
protected ReferenceArgumentCollection makeReferenceArgumentCollection() {
return new CollectAlignmentRefArgCollection();
}
public static class CollectAlignmentRefArgCollection implements ReferenceArgumentCollection {
@Argument(shortName = StandardOptionDefinitions.REFERENCE_SHORT_NAME,
doc = "Reference sequence file. Note that while this argument isn't required, without it only a small subset of the metrics will be calculated. Note also that if a reference sequence is provided, it must be accompanied by a sequence dictionary.",
optional = true)
public File REFERENCE_SEQUENCE = Defaults.REFERENCE_FASTA;
@Override
public File getReferenceFile() {
return REFERENCE_SEQUENCE;
};
}
}
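A typical invocation of this tool, with the optional reference supplied so the full metric set is calculated; paths are illustrative:
```bash
java -jar picard.jar CollectAlignmentSummaryMetrics \
    R=reference.fasta \
    I=input.bam \
    O=alignment_summary_metrics.txt
```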