build.gradle

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.test.RestIntegTestTask

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE

apply plugin: 'elasticsearch.test.fixtures'

esplugin {
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
}

versions << [
  'hadoop2': '2.8.5'
]

testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs"
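
// The krb5kdc fixture (managed by the elasticsearch.test.fixtures plugin applied
// above) provides the Kerberos KDC for the BUILD.ELASTIC.CO realm that the secure
// fixtures and test tasks below authenticate against; its krb5.conf and keytab
// locations are looked up from the fixture project further down.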

configurations {
  hdfsFixture
}
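
// hdfsFixture is a plain dependency bucket: it only carries the classpath of the
// MiniHDFS test fixture, which the AntFixture tasks below hand to a forked JVM
// via the CLASSPATH environment variable.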

dependencies {
  compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs-client:${versions.hadoop2}"
  compile 'org.apache.htrace:htrace-core4:4.0.1-incubating'
  runtimeOnly 'com.google.guava:guava:11.0.2'
  compile 'com.google.protobuf:protobuf-java:2.5.0'
  compile 'commons-logging:commons-logging:1.1.3'
  compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
  compile 'commons-cli:commons-cli:1.2'
  compile "commons-codec:commons-codec:${versions.commonscodec}"
  compile 'commons-collections:commons-collections:3.2.2'
  compile 'commons-configuration:commons-configuration:1.6'
  compile 'commons-io:commons-io:2.4'
  compile 'commons-lang:commons-lang:2.6'
  compile 'javax.servlet:servlet-api:2.5'
  compile "org.slf4j:slf4j-api:${versions.slf4j}"
  compile "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"

  hdfsFixture project(':test:fixtures:hdfs-fixture')

  // Set the keytab files in the classpath so that we can access them from test code
  // without the security manager freaking out.
  if (isEclipse == false) {
    testRuntime files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab").parent)
  }
}

restResources {
  restApi {
    includeCore '_common', 'cluster', 'nodes', 'indices', 'index', 'snapshot'
  }
}

normalization {
  runtimeClasspath {
    // ignore generated keytab files for the purposes of build avoidance
    ignore '*.keytab'
    // ignore fixture ports file which is on the classpath primarily to pacify the security manager
    ignore 'ports'
  }
}

dependencyLicenses {
  mapping from: /hadoop-.*/, to: 'hadoop'
}
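
// All hadoop-* artifacts ship under the same upstream license, so they are mapped
// onto a single 'hadoop' license entry instead of one entry per artifact.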

String realm = "BUILD.ELASTIC.CO"
String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")

// Create HDFS File System Testing Fixtures for HA/Secure combinations
for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
  project.tasks.create(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
    dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
    executable = "${BuildParams.runtimeJavaHome}/bin/java"
    env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
    maxWaitInSeconds 60
    onlyIf { BuildParams.inFipsJvm == false }
    waitCondition = { fixture, ant ->
      // the hdfs.MiniHDFS fixture writes the ports file when
      // it's ready, so we can just wait for the file to exist
      return fixture.portsFile.exists()
    }

    final List<String> miniHDFSArgs = []

    // If it's a secure fixture, then depend on the Kerberos fixture and principals, and add the krb5conf to the JVM options
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}")
    }

    // If it's an HA fixture, set a nameservice to use in the JVM options
    if (fixtureName.equals('haHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Dha-nameservice=ha-hdfs")
    }

    // Common options
    miniHDFSArgs.add('hdfs.MiniHDFS')
    miniHDFSArgs.add(baseDir)

    // If it's a secure fixture, then set the principal name and keytab locations to use for auth.
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
      miniHDFSArgs.add(
        project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
      )
    }

    args miniHDFSArgs.toArray()
  }
}
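
// For reference, the resulting fixture invocation looks roughly like this
// (a sketch; actual paths are resolved at execution time):
//
//   java -Djava.security.krb5.conf=<krb5.conf> -Dha-nameservice=ha-hdfs \
//       hdfs.MiniHDFS <baseDir> hdfs/hdfs.build.elastic.co@BUILD.ELASTIC.CO <keytab>
//
// with the secure/HA system properties and the principal+keytab arguments
// present only for the matching fixture flavors.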

Set disabledIntegTestTaskNames = []

for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
  task "${integTestTaskName}"(type: RestIntegTestTask) {
    description = "Runs rest tests against an elasticsearch cluster with HDFS."
    dependsOn(project.bundlePlugin)

    if (disabledIntegTestTaskNames.contains(integTestTaskName)) {
      enabled = false
    }

    if (integTestTaskName.contains("Secure")) {
      if (integTestTaskName.contains("Ha")) {
        dependsOn secureHaHdfsFixture
      } else {
        dependsOn secureHdfsFixture
      }
    }

    runner {
      onlyIf { BuildParams.inFipsJvm == false }
      if (integTestTaskName.contains("Ha")) {
        Path portsFile
        File portsFileDir = file("${workingDir}/hdfsFixture")
        if (integTestTaskName.contains("Secure")) {
          portsFile = buildDir.toPath()
            .resolve("fixtures")
            .resolve("secureHaHdfsFixture")
            .resolve("ports")
        } else {
          portsFile = buildDir.toPath()
            .resolve("fixtures")
            .resolve("haHdfsFixture")
            .resolve("ports")
        }
        nonInputProperties.systemProperty "test.hdfs-fixture.ports", file("$portsFileDir/ports")
        classpath += files(portsFileDir)
        // Copy the ports file to a separate location which is placed on the test classpath
        doFirst {
          mkdir(portsFileDir)
          copy {
            from portsFile
            into portsFileDir
          }
        }
      }
      if (integTestTaskName.contains("Secure")) {
        if (disabledIntegTestTaskNames.contains(integTestTaskName) == false) {
          nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
          nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
          jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
          nonInputProperties.systemProperty(
            "test.krb5.keytab.hdfs",
            project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
          )
        }
      }
    }
  }

  testClusters."${integTestTaskName}" {
    plugin(file(bundlePlugin.archiveFile))
    if (integTestTaskName.contains("Secure")) {
      systemProperty "java.security.krb5.conf", krb5conf
      extraConfigFile(
        "repository-hdfs/krb5.keytab",
        file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"),
        IGNORE_VALUE
      )
    }
  }
}
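
// Together with the base integTest task (configured further down), this loop
// yields integTestHa, integTestSecure and integTestSecureHa, each backed by its
// own test cluster. They run like any other Gradle task, e.g. (assuming the
// standard wrapper setup and that the plugin lives at :plugins:repository-hdfs):
//
//   ./gradlew :plugins:repository-hdfs:integTestSecureHa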

// Determine HDFS Fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  // hdfs fixture will not start without hadoop native libraries on windows
  String nativePath = System.getenv("HADOOP_HOME")
  if (nativePath != null) {
    Path path = Paths.get(nativePath)
    if (Files.isDirectory(path) &&
      Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
      Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
      Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
      fixtureSupported = true
    } else {
      throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin")
    }
  }
} else {
  fixtureSupported = true
}

boolean legalPath = rootProject.rootDir.toString().contains(" ") == false
if (legalPath == false) {
  fixtureSupported = false
}
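
// In short: the fixture runs everywhere except on Windows without the Hadoop
// native libraries (winutils.exe, hadoop.dll and hdfs.dll under $HADOOP_HOME/bin)
// and in checkouts whose absolute path contains a space.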

// Always ignore HA integration tests in the normal integration test runner; they are
// included below as part of their own HA-specific integration test tasks.
integTest.runner {
  onlyIf { BuildParams.inFipsJvm == false }
  exclude('**/Ha*TestSuiteIT.class')
}

if (fixtureSupported) {
  // Check depends on the HA test. Already depends on the standard test.
  project.check.dependsOn(integTestHa)

  // Both standard and HA tests depend on their respective HDFS fixtures
  integTest.dependsOn hdfsFixture
  integTestHa.dependsOn haHdfsFixture

  // The normal test runner only runs the standard hdfs rest tests
  integTest.runner {
    systemProperty 'tests.rest.suite', 'hdfs_repository'
  }

  // Only include the HA integration tests for the HA test task
  integTestHa.runner {
    setIncludes(['**/Ha*TestSuiteIT.class'])
  }
} else {
  if (legalPath) {
    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
  } else {
    logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'")
  }

  // The normal integration test runner will just test that the plugin loads
  integTest.runner {
    systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
  }
  // The HA fixture is unsupported, so don't run the HA tests.
  integTestHa.setEnabled(false)
}

check.dependsOn(integTestSecure, integTestSecureHa)

// Run just the secure hdfs rest test suite.
integTestSecure.runner {
  systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
}
// Ignore HA integration tests. They are included below as part of the integTestSecureHa test runner.
integTestSecure.runner {
  exclude('**/Ha*TestSuiteIT.class')
}
// Only include the HA integration tests for the HA test task
integTestSecureHa.runner {
  setIncludes(['**/Ha*TestSuiteIT.class'])
}
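
// Suite routing summary:
//   integTest         -> 'hdfs_repository' (only 'hdfs_repository/10_basic' when the fixture is unsupported)
//   integTestHa       -> Ha*TestSuiteIT classes against the HA fixture
//   integTestSecure   -> 'secure_hdfs_repository'
//   integTestSecureHa -> Ha*TestSuiteIT classes against the secure HA fixture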

thirdPartyAudit {
  ignoreMissingClasses()
  ignoreViolations(
    // internal java api: sun.net.dns.ResolverConfiguration
    // internal java api: sun.net.util.IPAddressUtil
    'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',

    // internal java api: sun.misc.Unsafe
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
    'org.apache.hadoop.io.nativeio.NativeIO',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',

    // internal java api: sun.nio.ch.DirectBuffer
    // internal java api: sun.misc.Cleaner
    'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
    'org.apache.hadoop.crypto.CryptoStreamUtils',

    // internal java api: sun.misc.SignalHandler
    'org.apache.hadoop.util.SignalLogger$Handler',
  )
}