build.gradle

/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.test.RestIntegTestTask

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE

apply plugin: 'elasticsearch.test.fixtures'
apply plugin: 'elasticsearch.rest-resources'
apply plugin: 'elasticsearch.rest-test'

esplugin {
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
}
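
// The classname above is the plugin's entry point; the esplugin plugin generates
// the plugin descriptor that Elasticsearch reads when loading the bundle.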

versions << [
  'hadoop2': '2.8.5'
]

testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs"

configurations {
  hdfsFixture
}
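
// The hdfsFixture configuration holds the standalone MiniHDFS fixture classpath;
// it is resolved below to build the CLASSPATH for the fixture processes and is
// kept separate from the plugin's own compile and runtime configurations.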

dependencies {
  api "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
  api project(path: 'hadoop-common', configuration: 'shadow')
  if (isEclipse) {
    /*
     * Eclipse can't pick up the shadow dependency so we point it at *something*
     * so it can compile things.
     */
    api project(path: 'hadoop-common')
  }
  api "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
  api "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
  api "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
  api "org.apache.hadoop:hadoop-hdfs-client:${versions.hadoop2}"
  api 'org.apache.htrace:htrace-core4:4.0.1-incubating'
  runtimeOnly 'com.google.guava:guava:11.0.2'
  api 'com.google.protobuf:protobuf-java:2.5.0'
  api 'commons-logging:commons-logging:1.1.3'
  api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
  api 'commons-cli:commons-cli:1.2'
  api "commons-codec:commons-codec:${versions.commonscodec}"
  api 'commons-collections:commons-collections:3.2.2'
  api 'commons-configuration:commons-configuration:1.6'
  api 'commons-io:commons-io:2.4'
  api 'commons-lang:commons-lang:2.6'
  api 'javax.servlet:servlet-api:2.5'
  api "org.slf4j:slf4j-api:${versions.slf4j}"
  api "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"
  hdfsFixture project(':test:fixtures:hdfs-fixture')
  // Set the keytab files in the classpath so that we can access them from test code without the security manager
  // freaking out.
  if (isEclipse == false) {
    testRuntimeOnly files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab").parent)
  }
}
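
// Note: the Hadoop dependencies appear to be declared explicitly, one artifact at
// a time, which keeps the dependencyLicenses and thirdPartyAudit checks below
// working against a fixed, auditable dependency set. The pinned versions are
// assumed to match what Hadoop 2.8.5 itself ships with.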

restResources {
  restApi {
    include '_common', 'cluster', 'nodes', 'indices', 'index', 'snapshot'
  }
}

normalization {
  runtimeClasspath {
    // ignore generated keytab files for the purposes of build avoidance
    ignore '*.keytab'
    // ignore fixture ports file which is on the classpath primarily to pacify the security manager
    ignore 'ports'
  }
}

tasks.named("dependencyLicenses").configure {
  mapping from: /hadoop-.*/, to: 'hadoop'
}

tasks.named("integTest").configure {
  dependsOn(project.tasks.named("bundlePlugin"))
}

testClusters.matching { it.name == "integTest" }.configureEach {
  plugin(project.tasks.bundlePlugin.archiveFile)
}

String realm = "BUILD.ELASTIC.CO"
String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")
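
// The realm and krb5.conf produced by the KDC fixture are shared by the secure
// fixtures and the secure integration test tasks below.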

// Create HDFS File System Testing Fixtures for HA/Secure combinations
for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
  project.tasks.register(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
    dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
    executable = "${BuildParams.runtimeJavaHome}/bin/java"
    env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
    maxWaitInSeconds 60
    onlyIf { BuildParams.inFipsJvm == false }
    waitCondition = { fixture, ant ->
      // the hdfs.MiniHDFS fixture writes the ports file when
      // it's ready, so we can just wait for the file to exist
      return fixture.portsFile.exists()
    }
    final List<String> miniHDFSArgs = []

    // If it's a secure fixture, then depend on the Kerberos fixture and principals + add the krb5.conf to the JVM options
    if (name.equals('secureHdfsFixture') || name.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}")
      onlyIf { BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 }
    }
    // If it's an HA fixture, set a nameservice to use in the JVM options
    if (name.equals('haHdfsFixture') || name.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Dha-nameservice=ha-hdfs")
    }

    // Common options
    miniHDFSArgs.add('hdfs.MiniHDFS')
    miniHDFSArgs.add(baseDir)

    // If it's a secure fixture, then set the principal name and keytab locations to use for auth.
    if (name.equals('secureHdfsFixture') || name.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
      miniHDFSArgs.add(
        project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
      )
    }

    args miniHDFSArgs.toArray()
  }
}
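
// Together these four fixtures cover the test matrix: single and HA namenodes,
// each with and without Kerberos. The secure variants additionally require a
// runtime JVM below Java 16, presumably because the Hadoop 2 security code does
// not work on newer Java versions.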

Set disabledIntegTestTaskNames = []

for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
  def testTask = tasks.register(integTestTaskName, RestIntegTestTask) {
    description = "Runs rest tests against an elasticsearch cluster with HDFS."
    dependsOn("bundlePlugin")

    if (disabledIntegTestTaskNames.contains(name)) {
      enabled = false
    }

    if (name.contains("Secure")) {
      onlyIf { BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 }
      if (name.contains("Ha")) {
        dependsOn "secureHaHdfsFixture"
      } else {
        dependsOn "secureHdfsFixture"
      }
    }

    onlyIf { BuildParams.inFipsJvm == false }

    if (name.contains("Ha")) {
      Path portsFile
      File portsFileDir = file("${workingDir}/hdfsFixture")
      if (name.contains("Secure")) {
        portsFile = buildDir.toPath()
          .resolve("fixtures")
          .resolve("secureHaHdfsFixture")
          .resolve("ports")
      } else {
        portsFile = buildDir.toPath()
          .resolve("fixtures")
          .resolve("haHdfsFixture")
          .resolve("ports")
      }
      nonInputProperties.systemProperty "test.hdfs-fixture.ports", file("$portsFileDir/ports")
      classpath += files(portsFileDir)
      // Copy ports file to separate location which is placed on the test classpath
      doFirst {
        mkdir(portsFileDir)
        copy {
          from portsFile
          into portsFileDir
        }
      }
    }

    if (name.contains("Secure")) {
      if (disabledIntegTestTaskNames.contains(name) == false) {
        nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
        nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
        jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
        nonInputProperties.systemProperty(
          "test.krb5.keytab.hdfs",
          project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
        )
      }
    }
  }

  testClusters.matching { it.name == testTask.name }.configureEach {
    plugin(bundlePlugin.archiveFile)
    if (integTestTaskName.contains("Secure")) {
      systemProperty "java.security.krb5.conf", krb5conf
      extraConfigFile(
        "repository-hdfs/krb5.keytab",
        file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"),
        IGNORE_VALUE
      )
    }
  }
}
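
// The HA tasks discover the fixture's dynamically assigned ports through the
// test.hdfs-fixture.ports system property; copying the ports file into a
// directory on the test classpath keeps the security manager happy when the
// tests read it.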

// Determine HDFS fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  // The HDFS fixture will not start without the Hadoop native libraries on Windows.
  String nativePath = System.getenv("HADOOP_HOME")
  if (nativePath != null) {
    Path path = Paths.get(nativePath)
    if (Files.isDirectory(path) &&
      Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
      Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
      Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
      fixtureSupported = true
    } else {
      throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin")
    }
  }
} else {
  fixtureSupported = true
}

boolean legalPath = rootProject.rootDir.toString().contains(" ") == false
if (legalPath == false) {
  fixtureSupported = false
}
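
// When the fixture is unsupported, the build degrades gracefully below: integTest
// falls back to a basic suite that only verifies the plugin loads, and integTestHa
// is disabled outright.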

// Always ignore HA integration tests in the normal integration test runner; they
// are included below as part of their own HA-specific integration test tasks.
tasks.named("integTest").configure {
  onlyIf { BuildParams.inFipsJvm == false }
  exclude('**/Ha*TestSuiteIT.class')
}

if (fixtureSupported) {
  // The check task depends on the HA test; it already depends on the standard test.
  tasks.named("check").configure {
    dependsOn("integTestHa")
  }

  // Both the standard and HA tests depend on their respective HDFS fixtures.
  tasks.named("integTest").configure {
    dependsOn "hdfsFixture"
    // The normal test runner only runs the standard hdfs rest tests.
    systemProperty 'tests.rest.suite', 'hdfs_repository'
  }
  tasks.named("integTestHa").configure {
    dependsOn "haHdfsFixture"
    // Only include the HA integration tests for the HA test task.
    setIncludes(['**/Ha*TestSuiteIT.class'])
  }
} else {
  if (legalPath) {
    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
  } else {
    logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'")
  }

  // The normal integration test runner will just test that the plugin loads.
  tasks.named("integTest").configure {
    systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
  }
  // The HA fixture is unsupported, so don't run the HA tests.
  tasks.named("integTestHa").configure {
    setEnabled(false)
  }
}
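
// The secure tasks below are wired into check unconditionally; they guard
// themselves at runtime through the onlyIf conditions (non-FIPS JVM, runtime
// Java below 16) configured in the loop above.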

tasks.named("check").configure {
  dependsOn("integTestSecure", "integTestSecureHa")
}

// Run just the secure hdfs rest test suite.
tasks.named("integTestSecure").configure {
  systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
  // Ignore the HA integration tests; they are included as part of the integTestSecureHa test runner.
  exclude('**/Ha*TestSuiteIT.class')
}
// Only include the HA integration tests for the HA test task.
tasks.named("integTestSecureHa").configure {
  setIncludes(['**/Ha*TestSuiteIT.class'])
}

tasks.named("thirdPartyAudit").configure {
  ignoreMissingClasses()
  ignoreViolations(
    // internal java api: sun.misc.Unsafe
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot'
  )
}