Skip to content
Commits on Source (7)
<?xml version="1.0" encoding="UTF-8"?>
<!-- Eclipse JDT classpath for the picard project: Java sources under
     src/java, the default JRE container, bundled TestNG and BCEL jars
     from lib/, with compiled classes emitted to bin/. -->
<classpath>
<classpathentry kind="src" path="src/java"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="lib" path="lib/testng/testng-5.5-jdk15.jar"/>
<classpathentry kind="lib" path="lib/ant/bcel-5.2.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>
<?xml version="1.0" encoding="UTF-8"?>
<!-- Eclipse project descriptor for "picard": a plain Java project built
     with the standard JDT Java builder; no linked projects. -->
<projectDescription>
<name>picard</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>
FROM broadinstitute/java-baseimage
MAINTAINER Broad Institute DSDE <dsde-engineering@broadinstitute.org>
ARG build_command=shadowJar
ARG jar_name=picard.jar
# Install ant, git for building
RUN apt-get update && \
apt-get --no-install-recommends install -y --force-yes \
git \
r-base \
ant && \
apt-get clean autoclean && \
apt-get autoremove -y
......@@ -14,8 +18,8 @@ COPY / /usr/picard/
WORKDIR /usr/picard
# Build the distribution jar, clean up everything else
RUN ./gradlew shadowJar && \
mv build/libs/picard.jar picard.jar && \
RUN ./gradlew ${build_command} && \
mv build/libs/${jar_name} picard.jar && \
mv src/main/resources/picard/docker_helper.sh docker_helper.sh && \
./gradlew clean && \
rm -rf src && \
......
......@@ -47,7 +47,24 @@ jacoco {
toolVersion = "0.7.5.201505241946"
}
final htsjdkVersion = System.getProperty('htsjdk.version', '2.14.3')
final requiredJavaVersion = "8"
final buildPrerequisitesMessage = "See https://github.com/broadinstitute/picard/blob/master/README.md#building-picard for information on how to build picard"
// Ensure that we have the right JDK version, a clone of the git repository
def ensureBuildPrerequisites(requiredJavaVersion, buildPrerequisitesMessage) {
// Make sure we can get a ToolProvider class loader. If not we may have just a JRE, or a JDK from the future.
if (ToolProvider.getSystemToolClassLoader() == null) {
throw new GradleException(
"The ClassLoader obtained from the Java ToolProvider is null. "
+ "A Java $requiredJavaVersion JDK must be installed. $buildPrerequisitesMessage")
}
if (!file(".git").isDirectory()) {
throw new GradleException("The Picard Github repository must be cloned using \"git clone\" to run the build. "
+ "$buildPrerequisitesMessage")
}
}
ensureBuildPrerequisites(requiredJavaVersion, buildPrerequisitesMessage)
final htsjdkVersion = System.getProperty('htsjdk.version', '2.16.1')
// We use a custom shaded build of the NIO library to avoid a regression in the authentication layer.
// GATK does the same, see https://github.com/broadinstitute/gatk/issues/3591
......@@ -68,10 +85,11 @@ configurations {
}
dependencies {
compile('com.intel.gkl:gkl:0.8.2') {
compile('com.intel.gkl:gkl:0.8.5') {
exclude module: 'htsjdk'
}
compile 'com.google.guava:guava:15.0'
compile 'org.apache.commons:commons-math3:3.5'
compile 'com.github.samtools:htsjdk:' + htsjdkVersion
compile 'org.broadinstitute:barclay:2.0.0'
compileOnly googleNio
......
#!/usr/bin/env bash
# Build and push the Picard docker images for a given git tag:
#   - broadinstitute/picard:<tag>                       (Docker Hub, plain jar)
#   - us.gcr.io/broad-gotc-prod/picard-cloud:<tag>      (GCR, cloud-enabled jar)
#
# Usage: build_push_docker.sh <git-tag>
#
# Fail fast: without set -e a failed `docker build` would still be followed
# by `docker push`, pushing a stale or partial image.
set -euo pipefail

# Guard $# before dereferencing $1 (required under `set -u`).
if [[ $# -lt 1 || -z "$1" ]]
then
    echo "Usage: build_push_docker.sh <git-tag>"
    exit 1
fi

declare -r TAG=${1}
declare -r PICARD_TAG=broadinstitute/picard:${TAG}
declare -r PICARD_CLOUD_TAG=us.gcr.io/broad-gotc-prod/picard-cloud:${TAG}

echo "Will build and push the following docker images:"
echo "${PICARD_TAG}"
echo "${PICARD_CLOUD_TAG}"

read -p "Is this really what you want to do? " -n 1 -r
echo # move to a new line after the single-character read

if [[ $REPLY =~ ^[Yy]$ ]]
then
    # The build_command/jar_name build args select which gradle task produces
    # the jar baked into each image (shadowJar vs cloudJar).
    docker build -t "${PICARD_TAG}" --build-arg build_command=shadowJar --build-arg jar_name=picard.jar .
    docker build -t "${PICARD_CLOUD_TAG}" --build-arg build_command=cloudJar --build-arg jar_name=picardcloud.jar .
    docker push "${PICARD_TAG}"
    # NOTE(review): "gcloud docker -- push" is deprecated in newer gcloud
    # releases in favor of "gcloud auth configure-docker" + "docker push";
    # confirm the installed gcloud version before modernizing.
    gcloud docker -- push "${PICARD_CLOUD_TAG}"
fi
picard-tools (2.18.14+dfsg-1) UNRELEASED; urgency=medium
* New upstream version
* Standards-Version: 4.2.1
* Fix link
Closes: #905706
-- Andreas Tille <tille@debian.org> Mon, 17 Sep 2018 15:13:19 +0200
picard-tools (2.18.2+dfsg-1) unstable; urgency=medium
* New upstream version
......
......@@ -27,7 +27,7 @@ Build-Depends: default-jdk (>= 2:1.9~),
libhtsjdk-java-doc,
libguava-java-doc,
libjs-jquery
Standards-Version: 4.1.4
Standards-Version: 4.2.1
Vcs-Browser: https://salsa.debian.org/med-team/picard-tools
Vcs-Git: https://salsa.debian.org/med-team/picard-tools.git
Homepage: http://broadinstitute.github.io/picard/
......
usr/share/doc/libpicard-java/api/jquery/external/jquery/jquery.js usr/share/javascript/jquery/jquery.min.js
usr/share/javascript/jquery/jquery.min.js usr/share/doc/libpicard-java/api/jquery/external/jquery/jquery.js
......@@ -4,7 +4,7 @@ Forwarded: not-needed
Last-Updated: 2016-07-07
--- a/build.gradle
+++ b/build.gradle
@@ -12,13 +12,7 @@
@@ -12,13 +12,7 @@ plugins {
id "java"
id 'maven'
id 'signing'
......@@ -18,7 +18,7 @@ Last-Updated: 2016-07-07
}
mainClassName = "picard.cmdline.PicardCommandLine"
@@ -31,6 +25,7 @@
@@ -31,6 +25,7 @@ repositories {
}
}
......@@ -26,15 +26,15 @@ Last-Updated: 2016-07-07
jacocoTestReport {
dependsOn test
group = "Reporting"
@@ -46,6 +41,7 @@
@@ -46,6 +41,7 @@ jacocoTestReport {
jacoco {
toolVersion = "0.7.5.201505241946"
}
+*/
final htsjdkVersion = System.getProperty('htsjdk.version', '2.14.3')
@@ -56,7 +52,7 @@
final requiredJavaVersion = "8"
final buildPrerequisitesMessage = "See https://github.com/broadinstitute/picard/blob/master/README.md#building-picard for information on how to build picard"
@@ -73,7 +69,7 @@ final googleNio = 'org.broadinstitute:go
// Get the jdk files we need to run javaDoc. We need to use these during compile, testCompile,
// test execution, and gatkDoc generation, but we don't want them as part of the runtime
// classpath and we don't want to redistribute them in the uber jar.
......@@ -43,13 +43,7 @@ Last-Updated: 2016-07-07
configurations {
cloudConfiguration {
@@ -72,13 +68,14 @@
exclude module: 'htsjdk'
}
compile 'com.google.guava:guava:15.0'
+ compile 'org.broadinstitute:gatk-native-bindings:debian'
compile 'com.github.samtools:htsjdk:' + htsjdkVersion
compile 'org.broadinstitute:barclay:2.0.0'
@@ -95,8 +91,8 @@ dependencies {
compileOnly googleNio
// javadoc utilities; compile/test only to prevent redistribution of sdk jars
......@@ -60,7 +54,7 @@ Last-Updated: 2016-07-07
testCompile 'org.testng:testng:6.9.10'
testCompile 'org.apache.commons:commons-lang3:3.6'
@@ -94,9 +91,18 @@
@@ -112,9 +108,18 @@ configurations.all {
sourceCompatibility = 1.8
targetCompatibility = 1.8
......@@ -81,7 +75,7 @@ Last-Updated: 2016-07-07
logger.info("build for version:" + version)
group = 'com.github.broadinstitute'
@@ -148,6 +154,8 @@
@@ -166,6 +171,8 @@ jar {
tasks.withType(Javadoc) {
// do this for all javadoc tasks, including gatkDoc
options.addStringOption('Xdoclint:none')
......@@ -90,7 +84,7 @@ Last-Updated: 2016-07-07
}
javadoc {
@@ -174,7 +182,8 @@
@@ -192,7 +199,8 @@ task picardDoc(type: Javadoc, dependsOn:
// The gatkDoc process instantiates any documented feature classes, so to run it we need the entire
// runtime classpath, as well as jdk javadoc files such as tools.jar, where com.sun.javadoc lives.
......@@ -100,7 +94,7 @@ Last-Updated: 2016-07-07
options.docletpath = classpath.asType(List)
options.doclet = "picard.util.help.PicardHelpDoclet"
@@ -261,6 +270,8 @@
@@ -279,6 +287,8 @@ tasks.withType(Test) {
}
}
......@@ -109,7 +103,7 @@ Last-Updated: 2016-07-07
// set heap size for the test JVM(s)
minHeapSize = "1G"
maxHeapSize = "2G"
@@ -397,6 +408,7 @@
@@ -415,6 +425,7 @@ task copyPicardDoc(dependsOn: 'picardDoc
into "$htmlDir/picarddoc"
}
......@@ -117,7 +111,7 @@ Last-Updated: 2016-07-07
task updateGhPages(dependsOn: ['copyJavadoc', 'copyPicardDoc']){
outputs.dir htmlDir
}
@@ -412,3 +424,4 @@
@@ -430,3 +441,4 @@ githubPages {
into 'newdocs'
}
}
......
......@@ -2,7 +2,7 @@ Description: do not use shadowjar
Author: Sascha Steinbiss <satta@debian.org>
--- a/build.gradle
+++ b/build.gradle
@@ -109,7 +109,7 @@
@@ -126,7 +126,7 @@ group = 'com.github.broadinstitute'
defaultTasks 'all'
......@@ -11,7 +11,7 @@ Author: Sascha Steinbiss <satta@debian.org>
// Source file names for the picard command line properties file. We select and include only one of
// these two files in each jar, renamed to "picardCmdLine.properties", depending on which parser we
@@ -197,6 +197,7 @@
@@ -214,6 +214,7 @@ task picardDoc(type: Javadoc, dependsOn:
options.addStringOption("verbose")
}
......@@ -19,7 +19,7 @@ Author: Sascha Steinbiss <satta@debian.org>
task currentJar(type: Copy){
from shadowJar
into new File(buildDir, "libs")
@@ -214,8 +215,7 @@
@@ -231,8 +232,7 @@ shadowJar {
}
}
}
......
......@@ -4,7 +4,7 @@ Description: Fix testng version
--- a/build.gradle
+++ b/build.gradle
@@ -57,9 +57,6 @@
@@ -74,9 +74,6 @@ final googleNio = 'org.broadinstitute:go
configurations {
cloudConfiguration {
extendsFrom runtime
......@@ -14,8 +14,8 @@ Description: Fix testng version
}
}
@@ -71,14 +68,13 @@
compile 'org.broadinstitute:gatk-native-bindings:debian'
@@ -88,14 +85,13 @@ dependencies {
compile 'org.apache.commons:commons-math3:3.5'
compile 'com.github.samtools:htsjdk:' + htsjdkVersion
compile 'org.broadinstitute:barclay:2.0.0'
- compileOnly googleNio
......@@ -31,7 +31,7 @@ Description: Fix testng version
}
configurations.all {
@@ -217,6 +213,7 @@
@@ -234,6 +230,7 @@ shadowJar {
}
*/
......@@ -39,7 +39,7 @@ Description: Fix testng version
task cloudJar(type: com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar) {
configurations = [project.configurations.cloudConfiguration]
from project.sourceSets.main.output
@@ -241,6 +238,7 @@
@@ -258,6 +255,7 @@ task barclayShadowJar(type: com.github.j
}
archiveName 'picardBarclay.jar'
}
......@@ -47,7 +47,7 @@ Description: Fix testng version
// Run the tests using the legacy parser only. Assumes that test code is written using
// legacy command line parser syntax.
@@ -251,6 +249,8 @@
@@ -268,6 +266,8 @@ task legacyTest(type: Test)
task barclayTest(type: Test) {
systemProperty 'picard.convertCommandLine', 'true'
systemProperty 'picard.useLegacyParser', 'false'
......@@ -56,7 +56,7 @@ Description: Fix testng version
}
// Run tests using both the legacy and barclay command line parsers.
@@ -262,6 +262,9 @@
@@ -279,6 +279,9 @@ tasks.withType(Test) {
outputs.upToDateWhen { false } // tests will always rerun
description = "Runs the unit tests"
......@@ -68,7 +68,7 @@ Description: Fix testng version
excludeGroups "slow", "broken"
--- a/src/test/java/picard/analysis/directed/CollectHsMetricsTest.java
+++ b/src/test/java/picard/analysis/directed/CollectHsMetricsTest.java
@@ -41,51 +41,6 @@
@@ -41,51 +41,6 @@ public class CollectHsMetricsTest extend
};
}
......@@ -120,7 +120,7 @@ Description: Fix testng version
@Test
public void testCoverageHistogram() throws IOException {
@@ -106,7 +61,7 @@
@@ -106,7 +61,7 @@ public class CollectHsMetricsTest extend
final boolean clipOverlappingReads = true;
final int sampleSize = 10;
......@@ -131,7 +131,7 @@ Description: Fix testng version
final String[] args = new String[] {
--- a/src/test/java/picard/analysis/artifacts/TransitionTest.java
+++ b/src/test/java/picard/analysis/artifacts/TransitionTest.java
@@ -34,13 +34,4 @@
@@ -34,13 +34,4 @@ public class TransitionTest {
return new Object[][] {{Character.MIN_VALUE}, {Transition.Base.A.base - 1}, {'Z'}, {Character.MAX_VALUE}};
}
......@@ -147,7 +147,7 @@ Description: Fix testng version
}
--- a/src/test/java/picard/util/IntervalListToolsTest.java
+++ b/src/test/java/picard/util/IntervalListToolsTest.java
@@ -80,12 +80,6 @@
@@ -80,12 +80,6 @@ public class IntervalListToolsTest exten
};
}
......
--- a/src/main/java/picard/util/LiftoverUtils.java
+++ b/src/main/java/picard/util/LiftoverUtils.java
@@ -324,7 +324,7 @@
* Note: this will modify the start/stop and alleles of this builder.
* Also note: if the reference allele does not match the reference sequence, this method will throw an exception
*
- * Based on Adrian Tan, Gonçalo R. Abecasis and Hyun Min Kang. (2015)
+ * Based on Adrian Tan, G. R. Abecasis and Hyun Min Kang. (2015)
* Unified Representation of Genetic Variants. Bioinformatics.
*
*/
Author: Olivier Sallou
Last-Update: 2018-04-23 14:51:06 +0000
Description: Fix some issues with JavaDoc
--- a/src/main/java/picard/illumina/IlluminaBasecallsToFastq.java
+++ b/src/main/java/picard/illumina/IlluminaBasecallsToFastq.java
@@ -23,7 +23,6 @@
......
Subject: unit test freezing
Description: this patch prevents tests from remaining running after an error;
applied upstream in master:
https://github.com/broadinstitute/picard/commit/4c2e22a9c591b8b7e8a427d33478e43d45c113b5
--- a/src/main/java/picard/illumina/NewIlluminaBasecallsConverter.java
+++ b/src/main/java/picard/illumina/NewIlluminaBasecallsConverter.java
@@ -152,7 +152,7 @@
completedWorkExecutor.shutdown();
//thread by surface tile
- final ThreadPoolExecutor tileProcessingExecutor = new ThreadPoolExecutorWithExceptions(numThreads);
+ final ThreadPoolExecutorWithExceptions tileProcessingExecutor = new ThreadPoolExecutorWithExceptions(numThreads);
for (final Integer tile : tiles) {
tileProcessingExecutor.submit(new TileProcessor(tile, barcodesFiles.get(tile)));
@@ -161,10 +161,18 @@
tileProcessingExecutor.shutdown();
awaitThreadPoolTermination("Reading executor", tileProcessingExecutor);
- awaitThreadPoolTermination("Tile completion executor", completedWorkExecutor);
- barcodeWriterThreads.values().forEach(ThreadPoolExecutor::shutdown);
- barcodeWriterThreads.forEach((barcode, executor) -> awaitThreadPoolTermination(barcode + " writer", executor));
+ // if there was an exception reading then initiate an immediate shutdown.
+ if (tileProcessingExecutor.exception != null) {
+ int tasksStillRunning = completedWorkExecutor.shutdownNow().size();
+ tasksStillRunning += barcodeWriterThreads.values().stream().mapToLong(executor -> executor.shutdownNow().size()).sum();
+ throw new PicardException("Reading executor had exceptions. There were " + tasksStillRunning
+ + " tasks were still running or queued and have been cancelled.", tileProcessingExecutor.exception);
+ } else {
+ awaitThreadPoolTermination("Tile completion executor", completedWorkExecutor);
+ barcodeWriterThreads.values().forEach(ThreadPoolExecutor::shutdown);
+ barcodeWriterThreads.forEach((barcode, executor) -> awaitThreadPoolTermination(barcode + " writer", executor));
+ }
}
private void awaitThreadPoolTermination(final String executorName, final ThreadPoolExecutor executorService) {
@@ -175,7 +183,7 @@
executorService.getQueue().size()));
}
} catch (final InterruptedException e) {
- e.printStackTrace();
+ log.error("Interrupted exception caught: ", e);
}
}
@@ -278,12 +286,12 @@
final int maxRecordsInRam =
Math.max(1, maxReadsInRamPerTile /
barcodeRecordWriterMap.size());
- return SortingCollection.newInstance(
+ return SortingCollection.newInstanceFromPaths(
outputRecordClass,
codecPrototype.clone(),
outputRecordComparator,
maxRecordsInRam,
- tmpDirs);
+ IOUtil.filesToPaths(tmpDirs));
}
}
--- a/src/main/java/picard/util/ThreadPoolExecutorWithExceptions.java
+++ b/src/main/java/picard/util/ThreadPoolExecutorWithExceptions.java
@@ -14,6 +14,7 @@
* while executing
*/
public class ThreadPoolExecutorWithExceptions extends ThreadPoolExecutor {
+ public Throwable exception = null;
/**
* Creates a fixed size thread pool executor that will rethrow exceptions from submitted jobs.
*
@@ -40,6 +41,7 @@
}
}
if (t != null) {
+ exception = t;
throw new PicardException(t.getMessage(), t);
}
}
......@@ -4,4 +4,3 @@
40-tests-fix-testng.patch
remove_google_nio.patch
fix_javadoc_ascii.patch
fix_test_freezing
addSbtPlugin("de.johoop" % "sbt-testng-plugin" % "3.0.2")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.13.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.7.1")
\ No newline at end of file
......@@ -184,8 +184,7 @@ public abstract class AbstractWgsMetricsCollector<T extends AbstractRecordAndOff
basesExcludedByCapping,
coverageCap,
getUnfilteredBaseQHistogram(),
collectWgsMetrics.SAMPLE_SIZE
);
collectWgsMetrics.SAMPLE_SIZE);
}
/**
......
......@@ -131,7 +131,7 @@ public class AlignmentSummaryMetrics extends MultilevelMetrics {
public long READS_ALIGNED_IN_PAIRS;
/**
* The fraction of reads whose mate pair was also aligned to the reference.
* The fraction of aligned reads whose mate pair was also aligned to the reference.
* READS_ALIGNED_IN_PAIRS / PF_READS_ALIGNED
*/
public double PCT_READS_ALIGNED_IN_PAIRS;
......
......@@ -49,9 +49,7 @@ import picard.filter.CountingPairedFilter;
import picard.util.MathUtil;
import java.io.File;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.*;
import static picard.cmdline.StandardOptionDefinitions.MINIMUM_MAPPING_QUALITY_SHORT_NAME;
......@@ -122,6 +120,12 @@ static final String USAGE_DETAILS = "<p>This tool collects metrics about the fra
@ArgumentCollection
protected IntervalArgumentCollection intervalArugmentCollection = makeIntervalArgumentCollection();
@Argument(doc="Output for Theoretical Sensitivity metrics.", optional = true)
public File THEORETICAL_SENSITIVITY_OUTPUT;
@Argument(doc="Allele fraction for which to calculate theoretical sensitivity.", optional = true)
public List<Double> ALLELE_FRACTION = new ArrayList<>(Arrays.asList(0.001, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.3, 0.5));
@Argument(doc = "If true, fast algorithm is used.")
public boolean USE_FAST_ALGORITHM = false;
......@@ -155,7 +159,7 @@ static final String USAGE_DETAILS = "<p>This tool collects metrics about the fra
optional = true)
public File INTERVALS;
public File getIntervalFile() { return INTERVALS; };
public File getIntervalFile() { return INTERVALS; }
};
/** Metrics for evaluating the performance of whole genome sequencing experiments. */
......@@ -434,10 +438,6 @@ static final String USAGE_DETAILS = "<p>This tool collects metrics about the fra
}
}
public static void main(final String[] args) {
new CollectWgsMetrics().instanceMainWithExit(args);
}
/** Gets the SamReader from which records will be examined. This will also set the header so that it is available in
* */
protected SamReader getSamReader() {
......@@ -455,6 +455,9 @@ static final String USAGE_DETAILS = "<p>This tool collects metrics about the fra
if (INTERVALS != null) {
IOUtil.assertFileIsReadable(INTERVALS);
}
if (THEORETICAL_SENSITIVITY_OUTPUT != null) {
IOUtil.assertFileIsWritable(THEORETICAL_SENSITIVITY_OUTPUT);
}
// it doesn't make sense for the locus accumulation cap to be lower than the coverage cap
if (LOCUS_ACCUMULATION_CAP < COVERAGE_CAP) {
......@@ -483,7 +486,7 @@ static final String USAGE_DETAILS = "<p>This tool collects metrics about the fra
iterator.setMappingQualityScoreCutoff(0); // Handled separately because we want to count bases
iterator.setIncludeNonPfReads(false);
final AbstractWgsMetricsCollector collector = getCollector(COVERAGE_CAP, getIntervalsToExamine());
final AbstractWgsMetricsCollector<?> collector = getCollector(COVERAGE_CAP, getIntervalsToExamine());
final WgsMetricsProcessor processor = getWgsMetricsProcessor(progress, refWalker, iterator, collector);
processor.processFile();
......@@ -491,6 +494,15 @@ static final String USAGE_DETAILS = "<p>This tool collects metrics about the fra
processor.addToMetricsFile(out, INCLUDE_BQ_HISTOGRAM, dupeFilter, mapqFilter, pairFilter);
out.write(OUTPUT);
if (THEORETICAL_SENSITIVITY_OUTPUT != null) {
// Write out theoretical sensitivity results.
final MetricsFile<TheoreticalSensitivityMetrics, ?> theoreticalSensitivityMetrics = getMetricsFile();
log.info("Calculating theoretical sensitivity at " + ALLELE_FRACTION.size() + " allele fractions.");
List<TheoreticalSensitivityMetrics> tsm = TheoreticalSensitivity.calculateSensitivities(SAMPLE_SIZE, collector.getUnfilteredDepthHistogram(), collector.getUnfilteredBaseQHistogram(), ALLELE_FRACTION);
theoreticalSensitivityMetrics.addAllMetrics(tsm);
theoreticalSensitivityMetrics.write(THEORETICAL_SENSITIVITY_OUTPUT);
}
return 0;
}
......
......@@ -32,7 +32,7 @@ import htsjdk.samtools.util.IOUtil;
import htsjdk.samtools.util.IntervalList;
import htsjdk.samtools.util.Log;
import htsjdk.samtools.util.StringUtil;
import org.broadinstitute.barclay.argparser.BetaFeature;
import org.broadinstitute.barclay.argparser.ExperimentalFeature;
import org.broadinstitute.barclay.help.DocumentedFeature;
import picard.PicardException;
import org.broadinstitute.barclay.argparser.Argument;
......@@ -46,7 +46,7 @@ import java.io.File;
import java.util.List;
@DocumentedFeature
@BetaFeature
@ExperimentalFeature
@CommandLineProgramProperties(
summary = CollectWgsMetricsWithNonZeroCoverage.USAGE_SUMMARY + CollectWgsMetricsWithNonZeroCoverage.USAGE_DETAILS,
oneLineSummary = CollectWgsMetricsWithNonZeroCoverage.USAGE_SUMMARY,
......@@ -54,7 +54,7 @@ import java.util.List;
)
public class CollectWgsMetricsWithNonZeroCoverage extends CollectWgsMetrics {
static final String USAGE_SUMMARY = "(Experimental) Collect metrics about coverage and performance of whole genome sequencing (WGS) experiments. ";
static final String USAGE_SUMMARY = "Collect metrics about coverage and performance of whole genome sequencing (WGS) experiments. ";
static final String USAGE_DETAILS = "This tool collects metrics about the percentages of reads that pass base- and mapping- quality " +
"filters as well as coverage (read-depth) levels. Both minimum base- and mapping-quality values as well as the maximum " +
"read depths (coverage cap) are user defined. This extends CollectWgsMetrics by including metrics related only to sites" +
......