build.gradle

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.test.RestIntegTestTask

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE

apply plugin: 'elasticsearch.test.fixtures'

esplugin {
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
}

versions << [
  'hadoop2': '2.8.5'
]

testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs"

configurations {
  hdfsFixture
}

dependencies {
  compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs-client:${versions.hadoop2}"
  compile 'org.apache.htrace:htrace-core4:4.0.1-incubating'
  runtimeOnly 'com.google.guava:guava:11.0.2'
  compile 'com.google.protobuf:protobuf-java:2.5.0'
  compile 'commons-logging:commons-logging:1.1.3'
  compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
  compile 'commons-cli:commons-cli:1.2'
  compile "commons-codec:commons-codec:${versions.commonscodec}"
  compile 'commons-collections:commons-collections:3.2.2'
  compile 'commons-configuration:commons-configuration:1.6'
  compile 'commons-io:commons-io:2.4'
  compile 'commons-lang:commons-lang:2.6'
  compile 'javax.servlet:servlet-api:2.5'
  compile "org.slf4j:slf4j-api:${versions.slf4j}"
  compile "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"

  hdfsFixture project(':test:fixtures:hdfs-fixture')

  // Set the keytab files in the classpath so that we can access them from test code without the security manager
  // freaking out.
  if (isEclipse == false) {
    testRuntime files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab").parent)
  }
}

restResources {
  restApi {
    includeCore '_common', 'cluster', 'nodes', 'indices', 'index', 'snapshot'
  }
}

normalization {
  runtimeClasspath {
    // ignore generated keytab files for the purposes of build avoidance
    ignore '*.keytab'
    // ignore fixture ports file which is on the classpath primarily to pacify the security manager
    ignore 'ports'
  }
}

dependencyLicenses {
  mapping from: /hadoop-.*/, to: 'hadoop'
}

String realm = "BUILD.ELASTIC.CO"
String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")

// Create HDFS File System Testing Fixtures for HA/Secure combinations
for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
  def tsk = project.tasks.register(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
    dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
    executable = "${BuildParams.runtimeJavaHome}/bin/java"
    env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
    maxWaitInSeconds 60
    onlyIf { BuildParams.inFipsJvm == false }
    waitCondition = { fixture, ant ->
      // the hdfs.MiniHDFS fixture writes the ports file when
      // it's ready, so we can just wait for the file to exist
      return fixture.portsFile.exists()
    }
    final List<String> miniHDFSArgs = []

    // If it's a secure fixture, depend on the Kerberos fixture and its principals, and add the krb5 conf to the JVM options
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}")
    }

    // If it's an HA fixture, set a nameservice to use in the JVM options
    if (fixtureName.equals('haHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Dha-nameservice=ha-hdfs")
    }

    // Common options
    miniHDFSArgs.add('hdfs.MiniHDFS')
    miniHDFSArgs.add(baseDir)

    // If it's a secure fixture, set the principal name and keytab locations to use for auth.
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
      miniHDFSArgs.add(
        project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
      )
    }

    args miniHDFSArgs.toArray()
  }

  // TODO: The task configuration block has side effects that require it currently to be always executed.
  // Otherwise tests start failing. Therefore we enforce the task creation for now.
  tsk.get()
}

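// For local debugging one of these fixtures can also be started on its own, e.g. `gradlew haHdfsFixture`
// from this project's directory; it signals readiness by writing its `ports` file (see waitCondition above).
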
// Integration test tasks named in this set are still created but left disabled,
// and skip the Kerberos-specific test setup below.
Set disabledIntegTestTaskNames = []

for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
  task "${integTestTaskName}"(type: RestIntegTestTask) {
    description = "Runs rest tests against an elasticsearch cluster with HDFS."
    dependsOn(project.bundlePlugin)

    if (disabledIntegTestTaskNames.contains(integTestTaskName)) {
      enabled = false
    }

    if (integTestTaskName.contains("Secure")) {
      if (integTestTaskName.contains("Ha")) {
        dependsOn secureHaHdfsFixture
      } else {
        dependsOn secureHdfsFixture
      }
    }

    runner {
      onlyIf { BuildParams.inFipsJvm == false }
      if (integTestTaskName.contains("Ha")) {
        Path portsFile
        File portsFileDir = file("${workingDir}/hdfsFixture")
        if (integTestTaskName.contains("Secure")) {
          portsFile = buildDir.toPath()
            .resolve("fixtures")
            .resolve("secureHaHdfsFixture")
            .resolve("ports")
        } else {
          portsFile = buildDir.toPath()
            .resolve("fixtures")
            .resolve("haHdfsFixture")
            .resolve("ports")
        }
        nonInputProperties.systemProperty "test.hdfs-fixture.ports", file("$portsFileDir/ports")
        classpath += files(portsFileDir)
        // Copy ports file to separate location which is placed on the test classpath
        doFirst {
          mkdir(portsFileDir)
          copy {
            from portsFile
            into portsFileDir
          }
        }
      }

      if (integTestTaskName.contains("Secure")) {
        if (disabledIntegTestTaskNames.contains(integTestTaskName) == false) {
          nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
          nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
          jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
          nonInputProperties.systemProperty(
            "test.krb5.keytab.hdfs",
            project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
          )
        }
      }
    }
  }

  testClusters."${integTestTaskName}" {
    plugin(bundlePlugin.archiveFile)
    if (integTestTaskName.contains("Secure")) {
      systemProperty "java.security.krb5.conf", krb5conf
      extraConfigFile(
        "repository-hdfs/krb5.keytab",
        file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE
      )
    }
  }
}

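// The generated tasks can also be invoked individually, e.g. `gradlew integTestSecureHa`;
// they are wired into `check` below (the HA task only when the fixture is supported).
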
// Determine HDFS Fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  // hdfs fixture will not start without hadoop native libraries on windows
  String nativePath = System.getenv("HADOOP_HOME")
  if (nativePath != null) {
    Path path = Paths.get(nativePath)
    if (Files.isDirectory(path) &&
      Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
      Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
      Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
      fixtureSupported = true
    } else {
      throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin")
    }
  }
} else {
  fixtureSupported = true
}

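// For illustration, a Windows setup that passes the check above would look something like
// `HADOOP_HOME=C:\hadoop` (path hypothetical) with bin\winutils.exe, bin\hadoop.dll and bin\hdfs.dll present.
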
// The fixture also cannot run when there are spaces in the project path.
boolean legalPath = rootProject.rootDir.toString().contains(" ") == false
if (legalPath == false) {
  fixtureSupported = false
}

// Always ignore HA integration tests in the normal integration test runner; they are included
// below as part of their own HA-specific integration test tasks.
integTest.runner {
  onlyIf { BuildParams.inFipsJvm == false }
  exclude('**/Ha*TestSuiteIT.class')
}

if (fixtureSupported) {
  // Check depends on the HA test. Already depends on the standard test.
  project.check.dependsOn(integTestHa)

  // Both standard and HA tests depend on their respective HDFS fixtures
  integTest.dependsOn hdfsFixture
  integTestHa.dependsOn haHdfsFixture

  // The normal test runner only runs the standard hdfs rest tests
  integTest.runner {
    systemProperty 'tests.rest.suite', 'hdfs_repository'
  }

  // Only include the HA integration tests for the HA test task
  integTestHa.runner {
    setIncludes(['**/Ha*TestSuiteIT.class'])
  }
} else {
  if (legalPath) {
    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
  } else {
    logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'")
  }

  // The normal integration test runner will just test that the plugin loads
  integTest.runner {
    systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
  }
  // HA fixture is unsupported. Don't run its tests.
  integTestHa.setEnabled(false)
}

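// Net effect: `integTest` runs the full `hdfs_repository` suite when the fixture is supported,
// and only the `hdfs_repository/10_basic` plugin-loads smoke test otherwise.
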
check.dependsOn(integTestSecure, integTestSecureHa)

// Run just the secure hdfs rest test suite.
integTestSecure.runner {
  systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
}
// Ignore HA integration tests. They are included below as part of the integTestSecureHa test runner.
integTestSecure.runner {
  exclude('**/Ha*TestSuiteIT.class')
}
// Only include the HA integration tests for the HA test task
integTestSecureHa.runner {
  setIncludes(['**/Ha*TestSuiteIT.class'])
}

thirdPartyAudit {
  ignoreMissingClasses()
  ignoreViolations(
    // internal java api: sun.net.dns.ResolverConfiguration
    // internal java api: sun.net.util.IPAddressUtil
    'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',

    // internal java api: sun.misc.Unsafe
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
    'org.apache.hadoop.io.nativeio.NativeIO',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',

    // internal java api: sun.nio.ch.DirectBuffer
    // internal java api: sun.misc.Cleaner
    'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
    'org.apache.hadoop.crypto.CryptoStreamUtils',

    // internal java api: sun.misc.SignalHandler
    'org.apache.hadoop.util.SignalLogger$Handler',
  )
}