1 В избранное 0 Ответвления 0

OSCHINA-MIRROR/mirrors-GATK

Присоединиться к Gitlife
Откройте для себя и примите участие в публичных проектах с открытым исходным кодом с участием более 10 миллионов разработчиков. Приватные репозитории также полностью бесплатны :)
Присоединиться бесплатно
Клонировать/Скачать
build.gradle 46 КБ
Копировать Редактировать Web IDE Исходные данные Просмотреть построчно История
Nalini Ganapati Отправлено 06.05.2024 21:24 24f93b5
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099
//Note: this section 'buildscript` is only for the dependencies of the buildscript itself.
// See the second 'repositories' section below for the actual dependencies of GATK itself
buildscript {
    repositories {
        // resolve the build script's own classpath from Maven Central
        mavenCentral()
    }
}
plugins {
    id "java" // set up default java compile and test tasks
    id "application" // provides installDist
    id 'maven-publish' // publishing artifacts to Maven repositories
    id 'signing' // GPG signing of published artifacts
    id "jacoco" // test coverage reporting
    id "de.undercouch.download" version "5.4.0" //used for downloading GSA lib
    id "com.github.johnrengelman.shadow" version "8.1.1" //used to build the shadow and sparkJars
    id "com.github.ben-manes.versions" version "0.12.0" //used for identifying dependencies that need updating
    id 'com.palantir.git-version' version '0.5.1' //version helper
    id 'org.sonatype.gradle.plugins.scan' version '2.6.1' // scans for security vulnerabilities in our dependencies
}
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar

import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter
application {
    mainClass = "org.broadinstitute.hellbender.Main"
    //Note: the test suite must use the same defaults. If you change system properties in this list you must also update the one in the test task
    // samjdk defaults: async writes on, async reads off, low deflate level for speed
    applicationDefaultJvmArgs = ["-Dsamjdk.use_async_io_read_samtools=false", "-Dsamjdk.use_async_io_write_samtools=true", "-Dsamjdk.use_async_io_write_tribble=false", "-Dsamjdk.compression_level=2"]
}
//Delete the windows script - we never test on Windows so let's not pretend it works
startScripts {
    doLast {
        // remove the .bat launcher generated by the application plugin
        delete windowsScript
    }
}
// Downloads the gsalib R package source tarball into the main resources tree.
// 'overwrite false' makes the task a no-op once the file already exists.
task downloadGsaLibFile(type: Download) {
    // use TLS rather than plain http for the download (CRAN serves https)
    src 'https://cran.r-project.org/src/contrib/gsalib_2.2.1.tar.gz'
    dest "src/main/resources/org/broadinstitute/hellbender/utils/R/gsalib.tar.gz"
    overwrite false
}
repositories {
    mavenCentral()
    maven {
        url "https://broadinstitute.jfrog.io/broadinstitute/libs-snapshot/" //for htsjdk snapshots
    }
    maven {
        url "https://oss.sonatype.org/content/repositories/snapshots" //for disq snapshots
    }
    // listed last so it is consulted after the remote repositories
    mavenLocal()
}
// Dependency/tool versions; each is overridable on the command line, e.g. -Dhtsjdk.version=...
final htsjdkVersion = System.getProperty('htsjdk.version','4.1.0')
final picardVersion = System.getProperty('picard.version','3.1.1')
final barclayVersion = System.getProperty('barclay.version','5.0.0')
final sparkVersion = System.getProperty('spark.version', '3.5.0')
final hadoopVersion = System.getProperty('hadoop.version', '3.3.6')
final disqVersion = System.getProperty('disq.version','0.3.8')
final genomicsdbVersion = System.getProperty('genomicsdb.version','1.5.3')
final bigQueryVersion = System.getProperty('bigQuery.version', '2.35.0')
final bigQueryStorageVersion = System.getProperty('bigQueryStorage.version', '2.47.0')
final guavaVersion = System.getProperty('guava.version', '32.1.3-jre')
final log4j2Version = System.getProperty('log4j2Version', '2.17.1')
final testNGVersion = '7.0.0'
final googleCloudNioDependency = 'com.google.cloud:google-cloud-nio:0.127.8'
// Naming and layout constants used by the packaging, doc, and conda tasks below.
final baseJarName = 'gatk'
final secondaryBaseJarName = 'hellbender'
final docBuildDir = "$buildDir/docs"
final pythonPackageArchiveName = 'gatkPythonPackageArchive.zip'
final gatkCondaTemplate = "gatkcondaenv.yml.template"
final gatkCondaYML = "gatkcondaenv.yml"
final largeResourcesFolder = "src/main/resources/large"
final buildPrerequisitesMessage = "See https://github.com/broadinstitute/gatk#building for information on how to build GATK."
// Returns true if any files in the target folder are git-lfs stub files.
def checkForLFSStubFiles(targetFolder) {
    final lfsStubFileHeader = "version https://git-lfs.github.com/spec/v1" // first line of a git-lfs stub file
    // Read the first n bytes of a file; bytes beyond EOF are left as zero.
    def readBytesFromFile = { largeFile, n ->
        final byte[] bytes = new byte[n]
        // readNBytes blocks until n bytes are read or EOF is hit; a bare read() may
        // legally return fewer bytes, which would leave the tail of the buffer zeroed
        // and make a genuine stub file go undetected.
        largeFile.withInputStream { stream -> stream.readNBytes(bytes, 0, bytes.length) }
        return bytes
    }
    def targetFiles = fileTree(dir: targetFolder)
    return targetFiles.any() { f ->
        final byte[] actualBytes = readBytesFromFile(f, lfsStubFileHeader.length());
        return new String(actualBytes, "UTF-8") == lfsStubFileHeader
    }
}
// if any of the large resources are lfs stub files, download them
def resolveLargeResourceStubFiles(largeResourcesFolder, buildPrerequisitesMessage) {
    // Run a git-lfs command line; fail the build with a helpful message if the
    // process can't be launched or exits non-zero.
    def execGitLFSCommand = { gitLFSExecCommand ->
        println "Executing: $gitLFSExecCommand"
        try {
            // NOTE(review): String.execute() splits the command on whitespace; this is
            // fine here only as long as largeResourcesFolder contains no spaces.
            def retCode = gitLFSExecCommand.execute().waitFor()
            if (retCode.intValue() != 0) {
                throw new GradleException("Execution of \"$gitLFSExecCommand\" failed with exit code: $retCode. " +
                        " git-lfs is required to build GATK but may not be installed. $buildPrerequisitesMessage");
            }
            return retCode
        } catch (IOException e) {
            // thrown when the git executable itself cannot be started
            throw new GradleException(
                    "An IOException occurred while attempting to execute the command $gitLFSExecCommand."
                    + " git-lfs is required to build GATK but may not be installed. $buildPrerequisitesMessage", e)
        }
    }
    // check for stub files, try to pull once if there are any, then check again
    if (checkForLFSStubFiles(largeResourcesFolder)) {
        final gitLFSPullLargeResources = "git lfs pull --include $largeResourcesFolder"
        execGitLFSCommand(gitLFSPullLargeResources)
        if (checkForLFSStubFiles(largeResourcesFolder)) {
            throw new GradleException("$largeResourcesFolder contains one or more git-lfs stub files."
                    + " The resource files in $largeResourcesFolder must be downloaded by running the git-lfs"
                    + " command \"$gitLFSPullLargeResources\". $buildPrerequisitesMessage")
        }
    }
}
// Check that we're in a folder which git recognizes as a git repository.
// This works for either a standard git clone or one created with `git worktree add`
def looksLikeWereInAGitRepository(){
    final dotGit = file(".git")
    // a worktree checkout has a ".git" *file* whose first line starts with "gitdir"
    dotGit.isDirectory() || (dotGit.exists() && dotGit.text.startsWith("gitdir"))
}
// Ensure that we have a clone of the git repository, and resolve any required git-lfs
// resource files that are needed to run the build but are still lfs stub files.
def ensureBuildPrerequisites(largeResourcesFolder, buildPrerequisitesMessage, skipGitCheck) {
    // warn on anything that isn't exactly Java 17 (this includes newer, untested versions)...
    if (!JavaVersion.current().equals(JavaVersion.VERSION_17)) {
        println("Warning: using Java ${JavaVersion.current()} but only Java 17 has been tested.")
    }
    // ...but fail hard only when the JVM is older than 17
    if (!JavaVersion.current().isCompatibleWith(JavaVersion.VERSION_17)) {
        throw new GradleException(
                "A Java 17 compatible (Java 17 or later) version is required to build GATK, but ${JavaVersion.current()} was found. "
                + "$buildPrerequisitesMessage")
    }
    if (!skipGitCheck && !looksLikeWereInAGitRepository() ) {
        throw new GradleException("This doesn't appear to be a git folder. " +
                "The GATK Github repository must be cloned using \"git clone\" to run the build. " +
                "\n$buildPrerequisitesMessage")
    }
    // Large runtime resource files must be present at build time to be compiled into the jar, so
    // try to resolve them to real files if any of them are stubs.
    resolveLargeResourceStubFiles(largeResourcesFolder, buildPrerequisitesMessage)
}
// -Drelease=true produces a release (non-SNAPSHOT) version number
final isRelease = Boolean.getBoolean("release")
final versionOverridden = System.getProperty("versionOverride") != null
// skip the git-repository check when the version is supplied explicitly
// (e.g. building from a source archive rather than a clone)
ensureBuildPrerequisites(largeResourcesFolder, buildPrerequisitesMessage, versionOverridden)
version = (versionOverridden ? System.getProperty("versionOverride") : gitVersion().replaceAll(".dirty", "")) + (isRelease ? "" : "-SNAPSHOT")
if (versionOverridden) {
    println "Version number overridden as " + version
}
configurations.all {
    resolutionStrategy {
        // the snapshot folder contains a dev version of guava, we don't want to use that.
        force 'com.google.guava:guava:' + guavaVersion
        // force the htsjdk version so we don't get a different one transitively
        force 'com.github.samtools:htsjdk:' + htsjdkVersion
        force 'com.google.protobuf:protobuf-java:3.23.4'
        // force testng dependency so we don't pick up a different version via GenomicsDB
        force 'org.testng:testng:' + testNGVersion
        force 'org.broadinstitute:barclay:' + barclayVersion
        force 'com.twitter:chill_2.12:0.10.0'
        force 'org.apache.commons:commons-math3:3.5'
        // make sure we don't pick up an incorrect version of the GATK variant of the google-nio library
        // via Picard, etc.
        force googleCloudNioDependency
        force 'com.esotericsoftware:kryo:4.0.0'
    }
    // NOTE(review): these spread-operator excludes target *every* configuration, yet they
    // sit inside configurations.all {} and so are re-applied once per configuration —
    // the outcome is correct, just redundant work.
    configurations*.exclude group: 'org.slf4j', module: 'slf4j-jdk14' //exclude this to prevent slf4j complaining about to many slf4j bindings
    configurations*.exclude group: 'com.google.guava', module: 'guava-jdk5'
    configurations*.exclude group: 'junit', module: 'junit'
}
tasks.withType(JavaCompile) {
    // all lint warnings are errors; '-proc:none' disables annotation processing
    options.compilerArgs = ['-proc:none', '-Xlint:all', '-Werror', '-Xdiags:verbose']
    options.encoding = 'UTF-8'
}
// extra source set for test utilities that are shared by the tests and published as a test-utils jar
sourceSets {
    testUtils
}
// Dependency change for including MLLib
configurations {
    // testUtils compiles/runs against everything main does; the test configurations
    // in turn see everything testUtils does
    testUtilsImplementation.extendsFrom implementation
    testUtilsRuntimeClasspath.extendsFrom runtimeClasspath
    testImplementation.extendsFrom testUtilsImplementation
    testRuntimeClasspath.extendsFrom testUtilsRuntimeClasspath
    implementation.exclude module: 'jul-to-slf4j'
    implementation.exclude module: 'javax.servlet'
    implementation.exclude module: 'servlet-api'
    implementation.exclude group: 'com.esotericsoftware.kryo'
    externalSourceConfiguration {
        // External sources we need for doc and tab completion generation tasks (i.e., Picard sources)
        transitive false
    }
    sparkConfiguration {
        extendsFrom runtimeClasspath
        // exclude Hadoop and Spark dependencies, since they are provided when running with Spark
        // (ref: http://unethicalblogger.com/2015/07/15/gradle-goodness-excluding-depends-from-shadow.html)
        exclude group: 'org.apache.hadoop'
        exclude module: 'spark-core_2.12'
        exclude group: 'org.slf4j'
        exclude module: 'jul-to-slf4j'
        exclude module: 'javax.servlet'
        exclude module: 'servlet-api'
        exclude group: 'com.esotericsoftware.kryo'
        exclude module: 'spark-mllib_2.12.15'
        exclude group: 'org.scala-lang'
        exclude module: 'kryo'
    }
}
dependencies {
    implementation ('org.freemarker:freemarker:2.3.30')
    implementation 'org.broadinstitute:barclay:' + barclayVersion
    // Library for configuration:
    implementation 'org.aeonbits.owner:owner:1.0.9'
    implementation 'com.github.broadinstitute:picard:' + picardVersion
    // Picard *sources* jar, consumed by the doc/tab-completion tasks (see externalSourceConfiguration)
    externalSourceConfiguration 'com.github.broadinstitute:picard:' + picardVersion + ':sources'
    implementation ('org.genomicsdb:genomicsdb:' + genomicsdbVersion) {
        // use the log4j/htsjdk/protobuf versions pinned above rather than GenomicsDB's
        exclude module: 'log4j-api'
        exclude module: 'log4j-core'
        exclude module: 'htsjdk'
        exclude module: 'protobuf-java'
    }
    implementation 'com.opencsv:opencsv:3.4'
    implementation 'com.google.guava:guava:' + guavaVersion
    implementation 'com.github.samtools:htsjdk:'+ htsjdkVersion
    implementation(googleCloudNioDependency)
    implementation 'com.google.cloud:google-cloud-bigquery:' + bigQueryVersion
    implementation 'com.google.cloud:google-cloud-bigquerystorage:' + bigQueryStorageVersion
    implementation "gov.nist.math.jama:gov.nist.math.jama:1.1.1"
    // this comes built-in when running on Google Dataproc, but the library
    // allows us to read from GCS also when testing locally (or on non-Dataproc clusters,
    // should we want to)
    implementation 'com.google.cloud.bigdataoss:gcs-connector:1.9.4-hadoop3'
    implementation 'org.apache.logging.log4j:log4j-api:' + log4j2Version
    implementation 'org.apache.logging.log4j:log4j-core:' + log4j2Version
    // include the apache commons-logging bridge that matches the log4j version we use so
    // messages that originate with dependencies that use commons-logging (such as jexl)
    // are routed to log4j
    implementation 'org.apache.logging.log4j:log4j-jcl:' + log4j2Version
    implementation 'org.apache.commons:commons-lang3:3.14.0'
    implementation 'org.apache.commons:commons-math3:3.6.1'
    implementation 'org.hipparchus:hipparchus-stat:2.0'
    implementation 'org.apache.commons:commons-collections4:4.4'
    implementation 'org.apache.commons:commons-vfs2:2.9.0'
    implementation 'org.apache.commons:commons-configuration2:2.9.0'
    constraints {
        implementation('org.apache.commons:commons-text') {
            version {
                strictly '1.10.0'
            }
            because 'previous versions have a nasty vulnerability: https://nvd.nist.gov/vuln/detail/CVE-2022-42889'
        }
    }
    implementation 'org.apache.httpcomponents:httpclient:4.5.12'
    implementation 'commons-beanutils:commons-beanutils:1.9.3'
    implementation 'commons-io:commons-io:2.5'
    implementation 'org.reflections:reflections:0.9.10'
    implementation 'it.unimi.dsi:fastutil:7.0.13'
    implementation 'org.broadinstitute:hdf5-java-bindings:1.1.0-hdf5_2.11.0'
    implementation 'org.broadinstitute:gatk-native-bindings:1.0.0'
    implementation 'org.ojalgo:ojalgo:44.0.0'
    implementation ('org.ojalgo:ojalgo-commons-math3:1.0.0') {
        exclude group: 'org.apache.commons'
    }
    // TODO: migrate to mllib_2.12.15?
    implementation ('org.apache.spark:spark-mllib_2.12:' + sparkVersion) {
        // JUL is used by Google Dataflow as the backend logger, so exclude jul-to-slf4j to avoid a loop
        exclude module: 'jul-to-slf4j'
        exclude module: 'javax.servlet'
        exclude module: 'servlet-api'
    }
    implementation 'com.thoughtworks.paranamer:paranamer:2.8'
    implementation 'org.jgrapht:jgrapht-core:1.1.0'
    implementation 'org.jgrapht:jgrapht-io:1.1.0'
    implementation('org.disq-bio:disq:' + disqVersion)
    implementation('org.apache.hadoop:hadoop-client:' + hadoopVersion) // should be a 'provided' dependency
    implementation('com.github.jsr203hadoop:jsr203hadoop:1.0.3')
    implementation('org.apache.orc:orc:1.6.5')
    implementation('de.javakaffee:kryo-serializers:0.45') {
        exclude module: 'kryo' // use Spark's version
    }
    // Dependency change for including MLLib
    implementation('org.objenesis:objenesis:1.2')
    testImplementation('org.objenesis:objenesis:2.1')
    // Comment the next lines to disable native code proxies in Spark MLLib
    implementation('com.github.fommil.netlib:netlib-native_ref-osx-x86_64:1.1:natives')
    implementation('com.github.fommil.netlib:netlib-native_ref-linux-x86_64:1.1:natives')
    implementation('com.github.fommil.netlib:netlib-native_system-linux-x86_64:1.1:natives')
    implementation('com.github.fommil.netlib:netlib-native_system-osx-x86_64:1.1:natives')
    implementation('com.intel.gkl:gkl:0.8.11') {
        exclude module: 'htsjdk'
    }
    implementation 'org.broadinstitute:gatk-bwamem-jni:1.0.4'
    implementation 'org.broadinstitute:gatk-fermilite-jni:1.2.0'
    implementation 'org.broadinstitute:http-nio:1.1.0'
    // Required for COSMIC Funcotator data source:
    implementation 'org.xerial:sqlite-jdbc:3.44.1.0'
    // natural sort
    implementation('net.grey-panther:natural-comparator:1.1')
    implementation('com.fasterxml.jackson.module:jackson-module-scala_2.12:2.9.8')
    // the testUtils source set builds on the main classes plus TestNG and a mini Hadoop cluster
    testUtilsImplementation sourceSets.main.output
    testUtilsImplementation 'org.testng:testng:' + testNGVersion
    testUtilsImplementation 'org.apache.hadoop:hadoop-minicluster:' + hadoopVersion
    testImplementation sourceSets.testUtils.output
    testImplementation "org.mockito:mockito-core:2.28.2"
    testImplementation "com.google.jimfs:jimfs:1.1"
}
// This list needs to be kept in sync with the corresponding list in scripts/dockertest.gradle.
//
// The --add-open directives required to run GATK. These directives need to be:
// - included as properties in the manifest file in the jar(s)
// - passed to java via the gradle "jvmArgs" task property for any task that executes GATK code from a
// classpath that does use a jar (i.e., gradle run, test, javadoc, gatkDoc and jacoco tasks, etc.)
// - passed as java command line args when running from classes directly
// - included in any IntelliJ run/debug/profile configurations
//
// Consumed in three places below: the jar manifests ('Add-Opens'), the `run` task, and the `test` task.
final runtimeAddOpens = [
    // taken from the union of everything encountered by tests, plus everything defined here:
    // https://github.com/apache/spark/blob/v3.3.0/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
    'java.base/java.lang=ALL-UNNAMED',
    'java.base/java.lang.invoke=ALL-UNNAMED',
    'java.base/java.lang.reflect=ALL-UNNAMED',
    'java.base/java.io=ALL-UNNAMED',
    'java.base/java.net=ALL-UNNAMED',
    'java.base/java.nio=ALL-UNNAMED',
    'java.base/java.util=ALL-UNNAMED',
    'java.base/java.util.concurrent=ALL-UNNAMED',
    'java.base/java.util.concurrent.atomic=ALL-UNNAMED',
    'java.base/sun.nio.ch=ALL-UNNAMED',
    'java.base/sun.nio.cs=ALL-UNNAMED',
    'java.base/sun.security.action=ALL-UNNAMED',
    'java.base/sun.util.calendar=ALL-UNNAMED',
    'java.base/sun.nio.fs=ALL-UNNAMED',
    'java.base/java.nio.channels.spi=ALL-UNNAMED',
    'java.base/jdk.internal.ref=ALL-UNNAMED',
    'java.base/java.lang.ref=ALL-UNNAMED',
    'java.base/java.util.zip=ALL-UNNAMED',
    'java.base/java.util.jar=ALL-UNNAMED',
    'java.base/java.nio.file.attribute=ALL-UNNAMED',
    'java.base/jdk.internal.loader=ALL-UNNAMED',
    'java.base/sun.net.www.protocol.jar=ALL-UNNAMED',
    'java.base/sun.invoke.util=ALL-UNNAMED',
    'java.base/java.util.concurrent.locks=ALL-UNNAMED',
    'java.base/java.security=ALL-UNNAMED',
    'java.base/sun.reflect.annotation=ALL-UNNAMED',
    'java.base/java.text=ALL-UNNAMED',
    'java.base/java.nio.charset=ALL-UNNAMED',
    'java.base/sun.reflect.generics.reflectiveObjects=ALL-UNNAMED',
    'java.management/com.sun.jmx.mbeanserver=ALL-UNNAMED',
    'java.management/javax.management=ALL-UNNAMED',
    'java.base/java.util.regex=ALL-UNNAMED',
    'java.base/sun.util.locale=ALL-UNNAMED',
    'java.base/jdk.internal.math=ALL-UNNAMED',
    'java.xml/com.sun.xml.internal.stream.util=ALL-UNNAMED',
    'java.base/java.time=ALL-UNNAMED',
    'java.base/sun.reflect.generics.factory=ALL-UNNAMED',
    'java.base/java.nio.channels=ALL-UNNAMED',
    'java.base/sun.security.util=ALL-UNNAMED',
    'java.base/java.time.zone=ALL-UNNAMED',
    'java.base/sun.reflect.generics.scope=ALL-UNNAMED',
    'java.base/sun.reflect.generics.tree=ALL-UNNAMED',
    'java.management/com.sun.jmx.interceptor=ALL-UNNAMED',
    'java.management/javax.management.openmbean=ALL-UNNAMED',
    'java.management/sun.management=ALL-UNNAMED',
    'jdk.management/com.sun.management.internal=ALL-UNNAMED',
    'jdk.management.jfr/jdk.management.jfr=ALL-UNNAMED',
    'jdk.jfr/jdk.jfr.internal.management=ALL-UNNAMED',
    'java.base/jdk.internal.module=ALL-UNNAMED',
    'java.base/java.lang.module=ALL-UNNAMED',
    'java.security.jgss/sun.security.krb5=ALL-UNNAMED'
]
// Extra opens needed only at test time (on top of runtimeAddOpens).
final testAddOpens = [
    'java.prefs/java.util.prefs=ALL-UNNAMED' // required for jacoco tasks
]
run {
    // Build the JVM argument list for `gradle run`: one '--add-opens <spec>' pair per
    // entry in runtimeAddOpens, plus any other required args.
    //
    // Collect into an explicit ArrayList rather than Stream.toList(): on Java 16+
    // Stream.toList() returns an *unmodifiable* list, which would make the add()
    // below throw UnsupportedOperationException.
    final runtimeJVMArgs = new ArrayList<String>()
    runtimeAddOpens.each { openSpec ->
        runtimeJVMArgs.add('--add-opens')
        runtimeJVMArgs.add(openSpec)
    }
    // add in any other required args
    runtimeJVMArgs.add('-Dio.netty.tryReflectionSetAccessible=true')
    jvmArgs = runtimeJVMArgs
}
test {
    // The test JVM needs both the runtime and the test-only --add-opens directives.
    final testJVMAddOpens = new ArrayList<>();
    testJVMAddOpens.addAll(runtimeAddOpens);
    testJVMAddOpens.addAll(testAddOpens);
    // Collect into an explicit ArrayList rather than Stream.toList(): on Java 16+
    // Stream.toList() returns an *unmodifiable* list, which would make the add()
    // below throw UnsupportedOperationException.
    final testConfigurationJVMArgs = new ArrayList<String>()
    testJVMAddOpens.each { openSpec ->
        testConfigurationJVMArgs.add('--add-opens')
        testConfigurationJVMArgs.add(openSpec)
    }
    // add in any other required args
    testConfigurationJVMArgs.add('-Dio.netty.tryReflectionSetAccessible=true')
    jvmArgs = testConfigurationJVMArgs
}
processResources {
    // the gsalib tarball must exist before resources are assembled into the jar
    dependsOn(downloadGsaLibFile)
    //add gatk launcher script to the jar as a resource
    from("gatk")
}
processTestResources {
    //Don't waste time packaging unnecessary test data into the test resources:
    include "org/broadinstitute/hellbender/utils/config/*"
    //Required for IOUtils resource tests
    include "org/broadinstitute/hellbender/utils/io/*"
}
java {
    // NOTE(review): '1.17' is the legacy "1.x" spelling; Gradle appears to map it to
    // Java 17 — confirm, or use JavaVersion.VERSION_17 explicitly
    sourceCompatibility = 1.17
    targetCompatibility = 1.17
}
// Create (or refresh) a symlink at symlinkLocation pointing to archivePath.
def createSymlinks(archivePath, symlinkLocation) {
    exec {
        // fail the build if ln itself fails
        ignoreExitValue = false
        // -f overwrites an existing link, -s makes the link symbolic
        commandLine 'ln', '-fs', archivePath, symlinkLocation
    }
}
// Suffix is what will be added to the symlink
def createGatkSymlinks(destinationDir, archiveFile, suffix, baseJarName, secondaryBaseJarName) {
    // an empty suffix yields "<name>.jar"; otherwise "<name>-<suffix>.jar"
    def finalSuffix = (suffix == "") ? "" : ("-" + suffix)
    def destination = destinationDir.getAsFile().get().toString()
    def jarPath = archiveFile.getAsFile().get().getAbsolutePath()
    // link both the primary (gatk) and legacy (hellbender) names next to the jar
    [baseJarName, secondaryBaseJarName].each { jarName ->
        createSymlinks(jarPath, destination + "/" + jarName + finalSuffix + ".jar")
    }
}
logger.info("build for version:" + version)
group = 'org.broadinstitute'
// Common manifest attributes for every jar we produce (main, shadow, spark, etc.)
tasks.withType(Jar) {
    // transform the list of --add-opens directives into manifest format, which requires only the source
    // package (unlike the command line equivalent, in the manifest the "ALL-UNNAMED" target is implied
    // and can't be included in the manifest entry syntax)
    final manifestAddOpens = runtimeAddOpens.stream()
            .map(o -> o.substring(0, (o.length() - "ALL-UNNAMED".length()) - 1))
            .collect(java.util.stream.Collectors.joining(' '))
    manifest {
        attributes 'Implementation-Title': 'The Genome Analysis Toolkit (GATK)',
                'Implementation-Version': archiveVersion.get(),
                'Toolkit-Short-Name' : 'GATK',
                'Main-Class': application.mainClass,
                'Picard-Version': picardVersion,
                'htsjdk-Version': htsjdkVersion,
                'Spark-Version': sparkVersion,
                'Multi-Release': 'true',
                'Add-Opens': manifestAddOpens
    }
}
wrapper {
    // Gradle version downloaded by ./gradlew
    gradleVersion = '8.2.1'
}
// Settings shared by every shadow-style jar (shadowJar, sparkJar, shadowTest* below).
tasks.withType(ShadowJar) {
    from(project.sourceSets.main.output)
    archiveBaseName = project.name + '-package'
    mergeServiceFiles()
    // shade guava so our pinned version can't clash with a cluster-provided one
    relocate 'com.google.common', 'org.broadinstitute.hellbender.relocated.com.google.common'
    zip64 true
    exclude 'log4j.properties' // from adam jar as it clashes with hellbender's log4j2.xml
    exclude '**/*.SF' // these are Manifest signature files and
    exclude '**/*.RSA' // keys which may accidentally be imported from other signed projects and then fail at runtime
    // Suggested by the akka devs to make sure that we do not get the spark configuration error.
    // http://doc.akka.io/docs/akka/snapshot/general/configuration.html#When_using_JarJar__OneJar__Assembly_or_any_jar-bundler
    transform(com.github.jengelman.gradle.plugins.shadow.transformers.AppendingTransformer) {
        resource = 'reference.conf'
    }
}
// shared test-task settings live in a separate script
apply from: "testsettings.gradle"
shadowJar {
    configurations = [project.configurations.runtimeClasspath]
    archiveClassifier = 'local'
    mergeServiceFiles('reference.conf')
    doLast {
        // Create a symlink to the newly created jar. The name will be gatk.jar and
        // it will be at the same level as the newly created jar. (overwriting symlink, if it exists)
        // Please note that this will cause failures in Windows, which does not support symlinks.
        createGatkSymlinks(destinationDirectory, archiveFile, "", baseJarName, secondaryBaseJarName)
    }
}
// convenience alias: `gradle localJar` builds the local shadow jar
task localJar{ dependsOn shadowJar }
task sparkJar(type: ShadowJar) {
    group = "Shadow"
    description = "Create a combined jar of project and runtime dependencies that excludes provided spark dependencies"
    configurations = [project.configurations.sparkConfiguration]
    archiveClassifier = 'spark'
    doLast {
        // Create a symlink to the newly created jar. The name will be gatk.jar and
        // it will be at the same level as the newly created jar. (overwriting symlink, if it exists)
        // Please note that this will cause failures in Windows, which does not support symlinks.
        createGatkSymlinks(destinationDirectory, archiveFile, archiveClassifier, baseJarName, secondaryBaseJarName)
    }
}
// A jar that only contains the test classes and resources (to be extracted for testing)
task shadowTestClassJar(type: ShadowJar){
    group = "Shadow"
    from sourceSets.test.output
    description = "Create a jar that packages the compiled test classes"
    archiveClassifier = "test"
}
// A minimal jar that only contains the extra dependencies needed for running the tests
task shadowTestJar(type: ShadowJar){
    dependsOn 'compileTestUtilsJava', 'processTestUtilsResources'
    group = "Shadow"
    // fixed the malformed description text (leading space, "arent")
    description = "A minimal jar that only contains the extra dependencies needed for running the tests that aren't packaged in the main shadow jar"
    // Package only what the test runtime needs beyond the main runtime classpath:
    // expand jar dependencies into the archive, pass directories and other files through as-is.
    from {
        (project.configurations.testRuntimeClasspath - project.configurations.runtimeClasspath ).collect {
            it.isDirectory() ? it : it.getName().endsWith(".jar") ? zipTree(it) : it
        }
    }
    archiveClassifier = "testDependencies"
}
// Stages everything that goes into the distribution zip under build/bundle-files-collected.
task collectBundleIntoDir(type: Copy) {
    dependsOn shadowJar, sparkJar, 'condaEnvironmentDefinition', 'gatkTabComplete', 'gatkDoc'
    doFirst {
        // sanity-check the non-task-produced inputs before copying
        assert file("gatk").exists()
        assert file("README.md").exists()
        assert file("$docBuildDir/tabCompletion/gatk-completion.sh").exists()
        assert file("src/main/resources/org/broadinstitute/hellbender/utils/config/GATKConfig.properties").exists()
    }
    from(shadowJar.archiveFile)
    from(sparkJar.archiveFile)
    from("gatk")
    from("README.md")
    from("$docBuildDir/tabCompletion/gatk-completion.sh")
    from("$docBuildDir/gatkDoc", { into("gatkdoc") })
    from("src/main/resources/org/broadinstitute/hellbender/utils/config/GATKConfig.properties") {
        rename 'GATKConfig.properties', 'GATKConfig.EXAMPLE.properties'
    }
    from("$buildDir/$pythonPackageArchiveName")
    from("$buildDir/$gatkCondaYML")
    from("scripts/sv", { into("scripts/sv") })
    from("scripts/cnv_wdl/", { into("scripts/cnv_wdl") })
    from("scripts/mutect2_wdl/", { into("scripts/mutect2_wdl") })
    from("scripts/dataproc-cluster-ui", { into("scripts/")})
    into "$buildDir/bundle-files-collected"
}
// Zips the staged bundle directory into the distributable GATK archive.
task bundle(type: Zip) {
    dependsOn collectBundleIntoDir
    zip64 true
    archiveBaseName = project.name + "-" + project.version
    destinationDirectory = file("$buildDir")
    archiveFileName = archiveBaseName.get() + ".zip"
    from("$buildDir/bundle-files-collected")
    into(archiveBaseName)
    doLast {
        logger.lifecycle("Created GATK distribution in ${destinationDirectory}/${archiveFileName}")
    }
}
jacocoTestReport {
    dependsOn test
    group = "Reporting"
    description = "Generate Jacoco coverage reports after running tests."
    getAdditionalSourceDirs().from(sourceSets.main.allJava.srcDirs)
    reports {
        // emit both machine-readable (xml) and human-readable (html) reports
        xml.required = true
        html.required = true
    }
}
// Instantiates the conda env yml from its template, substituting the env name/description.
task condaStandardEnvironmentDefinition(type: Copy) {
    from "scripts"
    into buildDir
    include gatkCondaTemplate
    rename { file -> gatkCondaYML }
    expand(["condaEnvName":"gatk",
            "condaEnvDescription" : "Conda environment for GATK Python Tools"])
    doLast {
        logger.lifecycle("Created standard Conda environment yml file: $gatkCondaYML")
    }
}
// Create GATK conda environment yml file from the conda enc template
task condaEnvironmentDefinition() {
    dependsOn 'pythonPackageArchive', 'condaStandardEnvironmentDefinition'
}
// Create the Python package archive file
task pythonPackageArchive(type: Zip) {
    inputs.dir "src/main/python/org/broadinstitute/hellbender/"
    // The archive is written into $buildDir (see destinationDirectory below), so the
    // output must be declared there; the previous project-relative declaration
    // ("gatkPythonPackageArchive.zip") pointed at a file that is never created,
    // which defeats Gradle's up-to-date checking for this task.
    outputs.file "$buildDir/$pythonPackageArchiveName"
    doFirst {
        assert file("src/main/python/org/broadinstitute/hellbender/").exists()
    }
    destinationDirectory = file("${buildDir}")
    archiveFileName = pythonPackageArchiveName
    from("src/main/python/org/broadinstitute/hellbender/")
    into("/")
    doLast {
        logger.lifecycle("Created GATK Python package archive in ${destinationDirectory}/${archiveFileName}")
    }
}
// Creates a standard, local, GATK conda environment, for use by developers during iterative
// development. Assumes conda or miniconda is already installed.
//
// NOTE: This CREATES a local conda environment; but does not *activate* it. The environment must
// be activated manually in the shell from which GATK will be run.
//
task localDevCondaEnv(type: Exec) {
    dependsOn 'condaEnvironmentDefinition'
    inputs.file("$buildDir/$pythonPackageArchiveName")
    workingDir "$buildDir"
    // NOTE(review): '--force' replaces any existing env of the same name; newer conda
    // releases spell this '--yes' — confirm against the supported conda version
    commandLine "conda", "env", "create", "--force", "-f", gatkCondaYML
}
// Companion jars for publishing: javadoc, sources, and the test-utils classes.
task javadocJar(type: Jar, dependsOn: javadoc) {
    archiveClassifier = 'javadoc'
    from "$docBuildDir/javadoc"
}
task sourcesJar(type: Jar) {
    from sourceSets.main.allSource
    archiveClassifier = 'sources'
}
task testUtilsJar(type: Jar){
    archiveBaseName = "$project.name-test-utils"
    from sourceSets.testUtils.output
}
tasks.withType(Javadoc) {
    // do this for all javadoc tasks, including gatkDoc
    options.addStringOption('Xdoclint:none')
    options.addStringOption('encoding', 'UTF-8')
}
javadoc {
    // This is a hack to disable the java default javadoc lint until we fix the html formatting
    // We only want to do this for the javadoc task, not gatkDoc
    options.addStringOption('Xdoclint:none', '-quiet')
    // include the unpacked Picard sources so their docs are generated alongside ours
    source = sourceSets.main.allJava + files(configurations.externalSourceConfiguration.collect { zipTree(it) })
    include '**/*.java'
}
// Javadoc and companion jars for the published test-utils artifact.
task testUtilsJavadoc(type: Javadoc) {
    // This is a hack to disable the java default javadoc lint until we fix the html formatting
    // We only want to do this for the javadoc task, not gatkDoc
    options.addStringOption('Xdoclint:none', '-quiet')
    source = sourceSets.testUtils.allJava
    classpath = sourceSets.testUtils.runtimeClasspath
    destinationDir = file("$docBuildDir/testUtilsJavadoc")
    include '**/*.java'
}
task testUtilsJavadocJar(type: Jar, dependsOn: testUtilsJavadoc){
    archiveBaseName = "$project.name-test-utils"
    archiveClassifier = 'javadoc'
    from "$docBuildDir/testUtilsJavadoc"
}
task testUtilsSourcesJar(type: Jar){
    archiveBaseName = "$project.name-test-utils"
    archiveClassifier = 'sources'
    from sourceSets.testUtils.allSource
}
// Generate GATK Online Doc
// Runs the Barclay-based GATKHelpDoclet over the tool sources to produce the online
// documentation tree (HTML by default, PHP with -PphpDoc) under $docBuildDir/gatkdoc.
task gatkDoc(type: Javadoc, dependsOn: classes) {
    final File gatkDocDir = new File("$docBuildDir/gatkdoc")
    doFirst {
        // make sure the output folder exists or we can create it
        if (!gatkDocDir.exists() && !gatkDocDir.mkdirs()) {
            throw new GradleException(String.format("Failure creating folder (%s) for GATK doc output in task (%s)",
                    gatkDocDir.getAbsolutePath(),
                    it.name));
        }
        // The stylesheet is not produced by the doclet itself, so stage it into the output folder up front.
        copy {
            from('src/main/resources/org/broadinstitute/hellbender/utils/helpTemplates')
            include 'gatkDoc.css'
            into gatkDocDir
        }
    }
    // Include the Picard source jar, which contains various .R, .sh, .css, .html, .xml and .MF files and
    // other resources, but we only want the files that javadoc can handle, so just take the .java files.
    source = sourceSets.main.allJava + files(configurations.externalSourceConfiguration.collect { zipTree(it) })
    include '**/*.java'
    // The gatkDoc process instantiates any documented feature classes, so to run it we need the entire
    // runtime classpath.
    classpath = sourceSets.main.runtimeClasspath
    options.docletpath = classpath.asType(List)
    options.doclet = "org.broadinstitute.hellbender.utils.help.GATKHelpDoclet"
    //gradle 6.x+ defaults to setting this true which breaks the barclay doclet
    options.noTimestamp(false)
    outputs.dir(gatkDocDir)
    options.destinationDirectory(gatkDocDir)
    options.addStringOption("settings-dir", "src/main/resources/org/broadinstitute/hellbender/utils/helpTemplates");
    if (project.hasProperty('phpDoc')) {
        // use -PphpDoc to generate .php file extensions, otherwise rely on default of .html
        final String phpExtension = "php"
        options.addStringOption("output-file-extension", phpExtension)
        options.addStringOption("index-file-extension", phpExtension)
    }
    options.addStringOption("absolute-version", getVersion())
    // DateTimeFormatter.format(...) already returns a String; the redundant toString() was removed
    // for consistency with the identical option in the gatkTabComplete and gatkWDLGen tasks.
    options.addStringOption("build-timestamp", ZonedDateTime.now().format(DateTimeFormatter.RFC_1123_DATE_TIME))
}
// Generate GATK Bash Tab Completion File
// Runs the Barclay BashTabCompletionDoclet over the sources to produce a bash completion
// script for the "gatk" launcher under $docBuildDir/tabCompletion.
task gatkTabComplete(type: Javadoc, dependsOn: classes) {
final File tabCompletionDir = new File("$docBuildDir/tabCompletion")
doFirst {
// make sure the output folder exists or we can create it
if (!tabCompletionDir.exists() && !tabCompletionDir.mkdirs()) {
throw new GradleException(String.format("Failure creating folder (%s) for GATK tab completion output in task (%s)",
tabCompletionDir.getAbsolutePath(),
it.name));
}
}
// Include the Picard source jar, which contains various .R, .sh, .css, .html, .xml and .MF files and
// other resources, but we only want the files that javadoc can handle, so just take the .java files.
source = sourceSets.main.allJava + files(configurations.externalSourceConfiguration.collect { zipTree(it) })
include '**/*.java'
// The gatkDoc process instantiates any documented feature classes, so to run it we need the entire
// runtime classpath, including Picard.
classpath = sourceSets.main.runtimeClasspath
options.docletpath = classpath.asType(List)
options.doclet = "org.broadinstitute.barclay.help.BashTabCompletionDoclet"
//gradle 6.x+ defaults to setting this true which breaks the barclay doclet
options.noTimestamp(false)
outputs.dir(tabCompletionDir)
options.destinationDirectory(tabCompletionDir)
// This is a hack to work around a gross Gradle bug:
options.addStringOption('use-default-templates', '-use-default-templates')
options.addStringOption("output-file-extension", "sh")
options.addStringOption("index-file-extension", "sh")
options.addStringOption("absolute-version", getVersion())
options.addStringOption("build-timestamp", ZonedDateTime.now().format(DateTimeFormatter.RFC_1123_DATE_TIME))
// The caller-* options describe the "gatk" launcher script to the doclet: arguments accepted
// BEFORE the tool name (caller-pre-*) and AFTER it (caller-post-*), with parallel space-separated
// lists for value types, mutual exclusions, aliases, and min/max occurrence counts.
options.addStringOption("caller-script-name", "gatk")
options.addStringOption("caller-pre-legal-args", "--help --list --dry-run --java-options")
options.addStringOption("caller-pre-arg-val-types", "null null null String")
options.addStringOption("caller-pre-mutex-args", "--help;list,dry-run,java-options --list;help,dry-run,java-options")
options.addStringOption("caller-pre-alias-args", "--help;-h")
options.addStringOption("caller-pre-arg-min-occurs", "0 0 0 0")
options.addStringOption("caller-pre-arg-max-occurs", "1 1 1 1")
options.addStringOption("caller-post-legal-args", "--spark-runner --spark-master --cluster --dry-run --java-options --conf --driver-memory --driver-cores --executor-memory --executor-cores --num-executors")
options.addStringOption("caller-post-arg-val-types", "String String String null String file int int int int int")
options.addStringOption("caller-post-mutex-args", "")
options.addStringOption("caller-post-alias-args", "")
options.addStringOption("caller-post-arg-min-occurs", "0 0 0 0 0 0 0 0 0 0")
options.addStringOption("caller-post-arg-max-occurs", "1 1 1 1 1 1 1 1 1 1")
}
// Given a generated WDL file, derive the absolute path of its companion test-inputs JSON file:
// "<basename>Inputs.json" located in the same directory as the WDL file.
def getWDLInputJSONTestFileNameFromWDLName(File wdlName) {
    final int extensionStart = wdlName.name.lastIndexOf('.')
    final String stem = wdlName.name.take(extensionStart)
    return new File(wdlName.getParentFile(), "${stem}Inputs.json").getAbsolutePath()
}
// Generate GATK Tool WDL
// Runs the GATKWDLDoclet over the tool sources to produce one .wdl per documented tool
// (plus an html index and test-input JSON files) under $docBuildDir/wdlGen.
task gatkWDLGen(type: Javadoc, dependsOn: classes) {
final File gatkWDLDir = new File("$docBuildDir/wdlGen")
outputs.dir(gatkWDLDir)
doFirst {
// make sure the output folder exists or we can create it
if (!gatkWDLDir.exists() && !gatkWDLDir.mkdirs()) {
throw new GradleException(String.format("Failure creating folder (%s) for GATK WDL output in task (%s)",
gatkWDLDir.getAbsolutePath(),
it.name));
}
// Stage the shared html template into the output folder; the doclet does not copy it itself.
copy {
from('src/main/resources/org/broadinstitute/hellbender/utils/wdlTemplates/common.html')
into gatkWDLDir
}
}
// Process main sources plus sources unpacked from the external source configuration, .java files only.
source = sourceSets.main.allJava + files(configurations.externalSourceConfiguration.collect { zipTree(it) })
include '**/*.java'
// The gatkWDLGen process instantiates any documented feature classes, so to run it we need the entire
// runtime classpath, including picard.
classpath = sourceSets.main.runtimeClasspath
options.docletpath = classpath.asType(List)
options.doclet = "org.broadinstitute.hellbender.utils.help.GATKWDLDoclet"
//gradle 6.x+ defaults to setting this true which breaks the barclay doclet
options.noTimestamp(false)
outputs.dir(gatkWDLDir)
options.destinationDirectory(gatkWDLDir)
options.addStringOption("settings-dir", "src/main/resources/org/broadinstitute/hellbender/utils/wdlTemplates");
options.addStringOption("output-file-extension", "wdl")
options.addStringOption("index-file-extension", "html")
options.addStringOption("absolute-version", getVersion())
options.addStringOption("build-timestamp", ZonedDateTime.now().format(DateTimeFormatter.RFC_1123_DATE_TIME))
// the wdl doclet will populate the test JSON input files with the name of a dummy
// file in this location, in order to satisfy cromwell's attempts to localize inputs and outputs
options.addStringOption("build-dir", System.getenv("TRAVIS_BUILD_DIR") ?: rootDir.getAbsolutePath())
}
// Shared helper: runs a WDL validation/execution command line and fails the build if the
// command cannot be launched or exits non-zero.
//
// validateWDL: the full command line (a String; String.execute() tokenizes it on whitespace,
//              so jar/WDL paths passed by callers must not contain spaces).
// Returns the (zero) exit code on success; throws GradleException otherwise.
def execWDLValidation = { validateWDL ->
    println "Executing: $validateWDL"
    try {
        // Process.waitFor() returns the process exit code as an int.
        def retCode = validateWDL.execute().waitFor()
        if (retCode != 0) {
            throw new GradleException("Execution of \"$validateWDL\" failed with exit code: $retCode.")
        }
        return retCode
    } catch (IOException e) {
        // Chain the underlying exception so the root cause appears in the build failure report.
        throw new GradleException("An IOException occurred while attempting to execute the command $validateWDL.", e)
    }
}
// Validates all hand-written WDL files under build/scripts with the cromwell womtool validator.
// Requires a local cromwell installation (CROMWELL_JAR and WOMTOOL_JAR environment variables).
task gatkValidateScriptsWdl() {
    doFirst {
        // running this task requires a local cromwell installation, with environment variables CROMWELL_JAR,
        // WOMTOOL_JAR set to the jar locations
        if (System.getenv('CROMWELL_JAR') == null || System.getenv('WOMTOOL_JAR') == null) {
            throw new GradleException("Running this task requires the CROMWELL_JAR and WOMTOOL_JAR environment variables to be set")
        }
    }
    doLast {
        // Run the womtool validator on all WDL files in the 'scripts' directory
        final File wdlFolder = new File(buildDir, "scripts")
        def wdlFiles = fileTree(dir: wdlFolder).filter {
            f -> f.getAbsolutePath().endsWith(".wdl")
        }
        final womtoolLocation = System.getenv('WOMTOOL_JAR')
        // Use each (not any): every file must be validated. The previous any() only iterated all
        // files because execWDLValidation's zero return value is falsy; each states the intent directly.
        wdlFiles.each { wdlFile ->
            final validateWDLCommand = "java -jar $womtoolLocation validate $wdlFile"
            execWDLValidation(validateWDLCommand)
        }
    }
}
// Validates the generated tool WDLs with womtool, then executes the *AllArgsTest WDLs with cromwell.
// Requires a local cromwell installation (CROMWELL_JAR and WOMTOOL_JAR environment variables).
task gatkValidateGeneratedWdl(dependsOn: [gatkWDLGen, shadowJar]) {
    doFirst {
        // running this task requires a local cromwell installation, with environment variables CROMWELL_JAR,
        // WOMTOOL_JAR set to the jar locations
        if (System.getenv('CROMWELL_JAR') == null || System.getenv('WOMTOOL_JAR') == null) {
            throw new GradleException("Running this task requires the CROMWELL_JAR and WOMTOOL_JAR environment variables to be set")
        }
    }
    doLast {
        // first, run the womtool validator on WDL files in the 'docs/wdlGen' directory
        final File wdlGenFolder = new File("$docBuildDir/wdlGen")
        def wdlFiles = fileTree(dir: wdlGenFolder).filter {
            f -> !f.getAbsolutePath().endsWith(".html") && !f.getAbsolutePath().endsWith(".json")
        }
        final womtoolLocation = System.getenv('WOMTOOL_JAR')
        // each (not any): every generated WDL must be validated; failures surface as exceptions
        // thrown by execWDLValidation.
        wdlFiles.each { wdlFile ->
            final validateWDLCommand = "java -jar $womtoolLocation validate $wdlFile"
            execWDLValidation(validateWDLCommand)
        }
        // now execute the *AllArgs test wdls using cromwell
        wdlFiles = fileTree(dir: wdlGenFolder).filter {
            f -> f.getAbsolutePath().endsWith("AllArgsTest.wdl")
        }
        // the test JSON input file is populated by the WDL gen process with the name of this dummy file
        // to satisfy cromwell's attempt to de/localize input/output files
        // (named wdlTestRunDir rather than buildDir to avoid shadowing the project-level buildDir property)
        def wdlTestRunDir = System.getenv("TRAVIS_BUILD_DIR") ?: rootDir.getAbsolutePath()
        final dummyWDLTestFileName = "$wdlTestRunDir/dummyWDLTestFile"
        final File dummyWDLTestFile = file(dummyWDLTestFileName)
        final cromwellLocation = System.getenv('CROMWELL_JAR')
        try {
            wdlFiles.each { wdlFile ->
                final testInputJSON = getWDLInputJSONTestFileNameFromWDLName(wdlFile)
                final runWDLCommand = "java -jar $cromwellLocation run --inputs $testInputJSON $wdlFile"
                // Recreate the dummy localization file before each run, then execute the WDL.
                execWDLValidation("touch $dummyWDLTestFileName")
                execWDLValidation(runWDLCommand)
            }
        } finally {
            // delete the dummy test file and the 'cromwell-executions' directory left behind by cromwell
            dummyWDLTestFile.delete()
            file("$wdlTestRunDir/cromwell-executions").deleteDir()
            file("$wdlTestRunDir/cromwell-workflow-logs").deleteDir()
        }
    }
}
// scan-gradle-plugin security vulnerability scan
// Configuration for the Sonatype OSS Index dependency audit; executed via the ossIndexAudit task.
ossIndexAudit {
allConfigurations = false // if true includes the dependencies in all resolvable configurations. By default is false, meaning only 'compileClasspath', 'runtimeClasspath', 'releaseCompileClasspath' and 'releaseRuntimeClasspath' are considered
useCache = true // true by default
outputFormat = 'DEFAULT' // Optional, other values are: 'DEPENDENCY_GRAPH' prints dependency graph showing direct/transitive dependencies, 'JSON_CYCLONE_DX_1_4' prints a CycloneDX 1.4 SBOM in JSON format.
showAll = false // if true prints all dependencies. By default is false, meaning only dependencies with vulnerabilities will be printed.
printBanner = true // if true will print ASCII text banner. By default is true.
// ossIndexAudit can be configured to exclude vulnerabilities from matching
// excludeVulnerabilityIds = ['39d74cc8-457a-4e57-89ef-a258420138c5'] // list containing ids of vulnerabilities to be ignored
// excludeCoordinates = ['commons-fileupload:commons-fileupload:1.3'] // list containing coordinate of components which if vulnerable should be ignored
}
/**
 * This specifies what artifacts will be built and uploaded when performing a maven upload.
 */
artifacts {
archives javadocJar
archives sourcesJar
archives testUtilsJar
archives testUtilsJavadocJar
archives testUtilsSourcesJar
}
//remove zip and tar added by the application plugin
// The dot is escaped so only a literal ".zip"/".tar" extension matches; the previous
// unescaped '.' would also match any file name merely ending in "zip"/"tar" (e.g. "foozip").
configurations.archives.artifacts.removeAll {it.file =~ /\.zip$/}
configurations.archives.artifacts.removeAll {it.file =~ /\.tar$/}
/**
 * Sign non-snapshot releases with our secret key. This should never need to be invoked directly.
 */
signing {
// Only require signing for release builds that are actually being published.
required { isRelease && gradle.taskGraph.hasTask("publish") }
sign publishing.publications
}
// Shared POM metadata (packaging, scm, developers, license) applied to both the main gatk
// publication and the test-utils publication below.
def basePomConfiguration = {
packaging = 'jar'
description = 'Development on GATK 4'
url = 'http://github.com/broadinstitute/gatk'
scm {
url = 'scm:git@github.com:broadinstitute/gatk.git'
connection = 'scm:git@github.com:broadinstitute/gatk.git'
developerConnection = 'scm:git@github.com:broadinstitute/gatk.git'
}
developers {
developer {
id = 'gatkdev'
name = 'GATK Development Team'
email = 'gatk-dev-public@broadinstitute.org'
}
}
licenses {
license {
name = 'Apache 2.0'
url = 'https://github.com/broadinstitute/gatk/blob/master/LICENSE.TXT'
distribution = 'repo'
}
}
}
//remove the shadow jar from the published component
// (the shadow plugin adds a shadowRuntimeElements variant to the java component; skip it so
// the fat jar is not published to maven)
components.java.withVariantsFromConfiguration(project.configurations.shadowRuntimeElements) {
skip()
}
// Maven publications: the main 'gatk' artifact and a separate 'gatk-test-utils' artifact,
// each with sources and javadoc jars. Releases go to Sonatype staging; snapshots to Artifactory.
publishing {
publications {
gatk(MavenPublication) {
from components.java
artifactId = "gatk"
pom basePomConfiguration
pom.name = "GATK4"
artifact sourcesJar
artifact javadocJar
}
testUtils(MavenPublication) {
artifactId = "gatk-test-utils"
pom basePomConfiguration
pom.name = "GATK4 Test Utilities"
artifact testUtilsJar
artifact testUtilsSourcesJar
artifact testUtilsJavadocJar
}
}
repositories {
maven {
// Repository and credentials are selected by release vs. snapshot status of this build.
name = isRelease ? "SonaType" : "Artifactory"
url = isRelease ? "https://oss.sonatype.org/service/local/staging/deploy/maven2/" : "https://broadinstitute.jfrog.io/broadinstitute/libs-snapshot-local/"
credentials {
username = isRelease ? project.findProperty("sonatypeUsername") : System.env.ARTIFACTORY_USERNAME
password = isRelease ? project.findProperty("sonatypePassword") : System.env.ARTIFACTORY_PASSWORD
}
}
}
}
// Log the version being uploaded just before the publish task runs.
publish {
doFirst {
println "Attempting to upload version:$version"
}
}
// Convenience aggregate tasks for building the spark jar and the full distribution.
task installSpark{ dependsOn sparkJar }
task installAll{ dependsOn installSpark, installDist }
// Explicit ordering: the distribution needs the downloaded GSA lib, which in turn needs sourcesJar.
installDist.dependsOn downloadGsaLibFile
downloadGsaLibFile.dependsOn sourcesJar
// For Gradle 8 explicitly add 'condaEnvironmentDefinition' as a dependency of the following tasks.
// For more information, please refer to
// https://docs.gradle.org/8.2.1/userguide/validation_problems.html#implicit_dependency in the Gradle documentation.
['shadowJar', 'sparkJar', 'compileTestUtilsJava', 'shadowTestClassJar', 'sourcesJar', 'testUtilsSourcesJar', 'gatkDoc', 'gatkTabComplete', 'gatkWDLGen'].each {
tasks.named(it).configure {
dependsOn 'condaEnvironmentDefinition'
}
}
// Running gradle with no task arguments builds the 'bundle' task.
defaultTasks 'bundle'

Опубликовать ( 0 )

Вы можете оставить комментарий после Вход в систему

1
https://api.gitlife.ru/oschina-mirror/mirrors-GATK.git
git@api.gitlife.ru:oschina-mirror/mirrors-GATK.git
oschina-mirror
mirrors-GATK
mirrors-GATK
master