/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.test.RestIntegTestTask

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE

apply plugin: 'elasticsearch.test.fixtures'

esplugin {
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
}

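// Pin the Hadoop 2 client version in one place; every Hadoop dependency below references it via ${versions.hadoop2}.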
versions << [
  'hadoop2': '2.8.5'
]

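// Reuse the shared Kerberos KDC test fixture (its "hdfs" service); it generates the krb5.conf and keytab
// files consumed by the secure fixtures and test clusters configured below.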
testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs"

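// A dedicated configuration holding the classpath of the MiniHDFS test fixture, kept separate from the
// plugin's own compile and runtime classpaths.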
configurations {
  hdfsFixture
}

dependencies {
  compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs-client:${versions.hadoop2}"
  compile 'org.apache.htrace:htrace-core4:4.0.1-incubating'
  compile 'com.google.guava:guava:11.0.2'
  compile 'com.google.protobuf:protobuf-java:2.5.0'
  compile 'commons-logging:commons-logging:1.1.3'
  compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
  compile 'commons-cli:commons-cli:1.2'
  compile "commons-codec:commons-codec:${versions.commonscodec}"
  compile 'commons-collections:commons-collections:3.2.2'
  compile 'commons-configuration:commons-configuration:1.6'
  compile 'commons-io:commons-io:2.4'
  compile 'commons-lang:commons-lang:2.6'
  compile 'javax.servlet:servlet-api:2.5'
  compile "org.slf4j:slf4j-api:${versions.slf4j}"
  compile "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"

  hdfsFixture project(':test:fixtures:hdfs-fixture')

  // Set the keytab files in the classpath so that we can access them from test code without the security manager
  // freaking out.
  if (isEclipse == false) {
    testRuntime files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab").parent)
  }
}

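// Make the listed core REST API specs available to this plugin's REST tests.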
restResources {
  restApi {
    includeCore '_common', 'cluster', 'nodes', 'indices', 'index', 'snapshot'
  }
}

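// Keep generated, run-specific files from invalidating the runtime classpath, so build avoidance still works: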
normalization {
  runtimeClasspath {
    // ignore generated keytab files for the purposes of build avoidance
    ignore '*.keytab'
    // ignore fixture ports file which is on the classpath primarily to pacify the security manager
    ignore 'ports'
  }
}

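// All hadoop-* artifacts share a single 'hadoop' license file rather than one file per jar.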
dependencyLicenses {
  mapping from: /hadoop-.*/, to: 'hadoop'
}

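// Kerberos realm served by the KDC fixture, and the krb5.conf file it generates; both are reused by the
// fixtures and test clusters below.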
String realm = "BUILD.ELASTIC.CO"
String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")

// Create HDFS File System Testing Fixtures for HA/Secure combinations
for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
  def tsk = project.tasks.register(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
    dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
    executable = "${BuildParams.runtimeJavaHome}/bin/java"
    env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
    maxWaitInSeconds 60
    onlyIf { BuildParams.inFipsJvm == false }
    waitCondition = { fixture, ant ->
      // the hdfs.MiniHDFS fixture writes the ports file when
      // it's ready, so we can just wait for the file to exist
      return fixture.portsFile.exists()
    }

    final List<String> miniHDFSArgs = []

    // If it's a secure fixture, add the krb5.conf to the JVM options
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}")
    }
    // If it's an HA fixture, set a nameservice to use in the JVM options
    if (fixtureName.equals('haHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Dha-nameservice=ha-hdfs")
    }

    // Common options
    miniHDFSArgs.add('hdfs.MiniHDFS')
    miniHDFSArgs.add(baseDir)

    // If it's a secure fixture, set the principal name and keytab location to use for auth.
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
      miniHDFSArgs.add(
        project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
      )
    }

    args miniHDFSArgs.toArray()
  }

  // TODO: The task configuration block has side effects that require it currently to be always executed.
  // Otherwise tests start failing. Therefore we enforce the task creation for now.
  tsk.get()
}

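// Each fixture is a normal task, so it can also be started on its own while debugging, e.g.
// (assuming this plugin sits at the conventional :plugins:repository-hdfs project path):
//
//   ./gradlew :plugins:repository-hdfs:secureHaHdfsFixture

// Integ test tasks whose names appear in this set are created below but left disabled; the set is empty
// here, so all three tasks run.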
Set disabledIntegTestTaskNames = []

for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
  task "${integTestTaskName}"(type: RestIntegTestTask) {
    description = "Runs rest tests against an elasticsearch cluster with HDFS."
    dependsOn(project.bundlePlugin)

    if (disabledIntegTestTaskNames.contains(integTestTaskName)) {
      enabled = false
    }

    if (integTestTaskName.contains("Secure")) {
      if (integTestTaskName.contains("Ha")) {
        dependsOn secureHaHdfsFixture
      } else {
        dependsOn secureHdfsFixture
      }
    }

    runner {
      onlyIf { BuildParams.inFipsJvm == false }

      if (integTestTaskName.contains("Ha")) {
        Path portsFile
        File portsFileDir = file("${workingDir}/hdfsFixture")
        if (integTestTaskName.contains("Secure")) {
          portsFile = buildDir.toPath()
            .resolve("fixtures")
            .resolve("secureHaHdfsFixture")
            .resolve("ports")
        } else {
          portsFile = buildDir.toPath()
            .resolve("fixtures")
            .resolve("haHdfsFixture")
            .resolve("ports")
        }
        nonInputProperties.systemProperty "test.hdfs-fixture.ports", file("$portsFileDir/ports")
        classpath += files(portsFileDir)

        // Copy ports file to separate location which is placed on the test classpath
        doFirst {
          mkdir(portsFileDir)
          copy {
            from portsFile
            into portsFileDir
          }
        }
      }

      if (integTestTaskName.contains("Secure")) {
        if (disabledIntegTestTaskNames.contains(integTestTaskName) == false) {
          nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
          nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
          jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
          nonInputProperties.systemProperty(
            "test.krb5.keytab.hdfs",
            project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
          )
        }
      }
    }
  }

  testClusters."${integTestTaskName}" {
    plugin(bundlePlugin.archiveFile)
    if (integTestTaskName.contains("Secure")) {
      systemProperty "java.security.krb5.conf", krb5conf
      extraConfigFile(
        "repository-hdfs/krb5.keytab",
        file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"),
        IGNORE_VALUE
      )
    }
  }
}

// Determine HDFS Fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  // hdfs fixture will not start without hadoop native libraries on windows
  String nativePath = System.getenv("HADOOP_HOME")
  if (nativePath != null) {
    Path path = Paths.get(nativePath)
    if (Files.isDirectory(path) &&
      Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
      Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
      Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
      fixtureSupported = true
    } else {
      throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin")
    }
  }
} else {
  fixtureSupported = true
}

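// The fixture also cannot cope with spaces in its working directory, so treat a checkout path containing
// spaces as unsupported too.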
boolean legalPath = rootProject.rootDir.toString().contains(" ") == false
if (legalPath == false) {
  fixtureSupported = false
}

// Always ignore HA integration tests in the normal integration test runner; they are included below as
// part of their own HA-specific integration test tasks.
integTest.runner {
  onlyIf { BuildParams.inFipsJvm == false }
  exclude('**/Ha*TestSuiteIT.class')
}

if (fixtureSupported) {
  // Check depends on the HA test. Already depends on the standard test.
  project.check.dependsOn(integTestHa)

  // Both standard and HA tests depend on their respective HDFS fixtures
  integTest.dependsOn hdfsFixture
  integTestHa.dependsOn haHdfsFixture

  // The normal test runner only runs the standard hdfs rest tests
  integTest.runner {
    systemProperty 'tests.rest.suite', 'hdfs_repository'
  }

  // Only include the HA integration tests for the HA test task
  integTestHa.runner {
    setIncludes(['**/Ha*TestSuiteIT.class'])
  }
} else {
  if (legalPath) {
    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
  } else {
    logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'")
  }

  // The normal integration test runner will just test that the plugin loads
  integTest.runner {
    systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
  }
  // The HA fixture is unsupported, so don't run the HA tests at all.
  integTestHa.setEnabled(false)
}

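// The secure suites are always wired into check; on FIPS JVMs they are skipped at runtime by the onlyIf
// conditions configured on their runners above.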
check.dependsOn(integTestSecure, integTestSecureHa)

// Run just the secure hdfs rest test suite.
integTestSecure.runner {
  systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
}
// Ignore HA integration tests; they are included below as part of the integTestSecureHa test runner.
integTestSecure.runner {
  exclude('**/Ha*TestSuiteIT.class')
}
// Only include the HA integration tests for the HA test task
integTestSecureHa.runner {
  setIncludes(['**/Ha*TestSuiteIT.class'])
}

thirdPartyAudit {
  ignoreMissingClasses()
  ignoreViolations(
    // internal java api: sun.net.dns.ResolverConfiguration
    // internal java api: sun.net.util.IPAddressUtil
    'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',

    // internal java api: sun.misc.Unsafe
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
    'org.apache.hadoop.io.nativeio.NativeIO',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',

    // internal java api: sun.nio.ch.DirectBuffer
    // internal java api: sun.misc.Cleaner
    'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
    'org.apache.hadoop.crypto.CryptoStreamUtils',

    // internal java api: sun.misc.SignalHandler
    'org.apache.hadoop.util.SignalLogger$Handler',
  )
}