Browse Source

Merge pull request ESQL-1265 from elastic/main

🤖 ESQL: Merge upstream
elasticsearchmachine 2 years ago
parent
commit
2a0b1acadb
99 changed files with 2005 additions and 777 deletions
  1. 1 1
      build-tools-internal/version.properties
  2. 5 0
      docs/changelog/96262.yaml
  3. 5 0
      docs/changelog/96550.yaml
  4. 6 0
      docs/changelog/96613.yaml
  5. 5 0
      docs/changelog/96741.yaml
  6. 65 0
      docs/reference/cluster/cluster-info.asciidoc
  7. 0 2
      docs/reference/search/search-your-data/knn-search.asciidoc
  8. 72 72
      gradle/verification-metadata.xml
  9. 1 0
      modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
  10. 2 1
      rest-api-spec/src/main/resources/rest-api-spec/api/cluster.info.json
  11. 1 0
      rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml
  12. 16 0
      rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml
  13. 18 7
      server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java
  14. 3 3
      server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
  15. 9 4
      server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
  16. 4 1
      server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
  17. 15 2
      server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
  18. 10 1
      server/src/main/java/org/elasticsearch/index/IndexSettings.java
  19. 43 1
      server/src/main/java/org/elasticsearch/index/engine/Engine.java
  20. 56 44
      server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
  21. 15 9
      server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java
  22. 1 0
      server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java
  23. 1 0
      server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java
  24. 1 0
      server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java
  25. 12 4
      server/src/main/java/org/elasticsearch/rest/action/info/RestClusterInfoAction.java
  26. 12 0
      server/src/main/java/org/elasticsearch/script/ScriptContextStats.java
  27. 26 0
      server/src/main/java/org/elasticsearch/script/ScriptStats.java
  28. 9 0
      server/src/main/java/org/elasticsearch/script/TimeSeries.java
  29. 6 11
      server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java
  30. 16 10
      server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java
  31. 12 7
      server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
  32. 58 0
      server/src/test/java/org/elasticsearch/script/ScriptContextStatsTests.java
  33. 34 0
      server/src/test/java/org/elasticsearch/script/ScriptStatsTests.java
  34. 37 0
      server/src/test/java/org/elasticsearch/script/TimeSeriesTests.java
  35. 3 1
      test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java
  36. 30 0
      x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java
  37. 73 0
      x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenization.java
  38. 92 0
      x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationUpdate.java
  39. 2 0
      x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NlpConfigUpdate.java
  40. 8 1
      x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java
  41. 17 5
      x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java
  42. 2 1
      x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/ilm-policy/profiling-60-days.json
  43. 6 2
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/AbstractNlpConfigUpdateTestCase.java
  44. 86 0
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java
  45. 83 0
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationUpdateTests.java
  46. 2 0
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigTestScaffolding.java
  47. 4 1
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NlpConfigUpdateTests.java
  48. 7 1
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java
  49. 7 1
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java
  50. 11 1
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java
  51. 209 0
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java
  52. 21 0
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TestRegistryWithCustomPlugin.java
  53. 22 0
      x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/core/template/custom-plugin-policy.json
  54. 4 4
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java
  55. 3 3
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java
  56. 7 3
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionAction.java
  57. 7 3
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionAction.java
  58. 7 3
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java
  59. 7 3
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java
  60. 2 12
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionAction.java
  61. 2 12
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionAction.java
  62. 2 22
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventAction.java
  63. 2 12
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionAction.java
  64. 2 1
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestDeleteSearchApplicationAction.java
  65. 2 1
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestGetSearchApplicationAction.java
  66. 2 1
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestListSearchApplicationAction.java
  67. 2 1
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestPutSearchApplicationAction.java
  68. 2 1
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestQuerySearchApplicationAction.java
  69. 2 1
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestRenderSearchApplicationQueryAction.java
  70. 0 9
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/utils/LicenseUtils.java
  71. 39 0
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/AbstractRestEnterpriseSearchActionTests.java
  72. 3 3
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandlerTests.java
  73. 24 0
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionActionTests.java
  74. 24 0
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionActionTests.java
  75. 37 0
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventActionTests.java
  76. 32 0
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionActionTests.java
  77. 0 91
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionActionTests.java
  78. 0 83
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionActionTests.java
  79. 0 68
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventActionTests.java
  80. 0 87
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionActionTests.java
  81. 10 24
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestDeleteSearchApplicationActionTests.java
  82. 11 24
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestGetSearchApplicationActionTests.java
  83. 8 22
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestListSearchApplicationActionTests.java
  84. 13 25
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestPutSearchApplicationActionTests.java
  85. 10 24
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestQuerySearchApplicationActionTests.java
  86. 10 25
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestRenderSearchApplicationQueryActionTests.java
  87. 1 0
      x-pack/plugin/ml/build.gradle
  88. 1 0
      x-pack/plugin/ml/src/main/java/module-info.java
  89. 77 0
      x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertJapaneseTokenizer.java
  90. 19 1
      x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java
  91. 31 0
      x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/JapaneseWordPieceAnalyzer.java
  92. 4 0
      x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java
  93. 6 1
      x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceAnalyzer.java
  94. 103 0
      x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertJapaneseTokenizerTests.java
  95. 24 0
      x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java
  96. 65 0
      x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizerTests.java
  97. 32 3
      x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingIndexTemplateRegistry.java
  98. 81 10
      x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingIndexTemplateRegistryTests.java
  99. 23 0
      x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml

+ 1 - 1
build-tools-internal/version.properties

@@ -1,5 +1,5 @@
 elasticsearch     = 8.9.0
-lucene            = 9.7.0-snapshot-24df30cca69
+lucene            = 9.7.0-snapshot-41cd1f7a88c
 
 bundled_jdk_vendor = openjdk
 bundled_jdk = 20.0.1+9@b4887098932d415489976708ad6d1a4b

+ 5 - 0
docs/changelog/96262.yaml

@@ -0,0 +1,5 @@
+pr: 96262
+summary: Fix `retry_on_conflict` parameter in update API to not retry indefinitely
+area: CRUD
+type: bug
+issues: []

+ 5 - 0
docs/changelog/96550.yaml

@@ -0,0 +1,5 @@
+pr: 96550
+summary: "[Profiling] Allow to upgrade managed ILM policy"
+area: Application
+type: enhancement
+issues: []

+ 6 - 0
docs/changelog/96613.yaml

@@ -0,0 +1,6 @@
+pr: 96613
+summary: Add `script` information to the cluster info endpoint
+area: Stats
+type: enhancement
+issues:
+ - 95394

+ 5 - 0
docs/changelog/96741.yaml

@@ -0,0 +1,5 @@
+pr: 96741
+summary: Upgrade to new lucene snapshot 9.7.0-snapshot-41cd1f7a88c
+area: Search
+type: upgrade
+issues: []

+ 65 - 0
docs/reference/cluster/cluster-info.asciidoc

@@ -44,6 +44,9 @@ Ingest information.
 
 `thread_pool`::
 Statistics about each thread pool, including current size, queue size and rejected tasks.
+
+`script`::
+Contains script statistics of the cluster.
 --
 
 [role="child_attributes"]
@@ -282,6 +285,65 @@ Number of tasks completed by the thread pool executor.
 =======
 ======
 
+[[cluster-info-api-response-body-script]]
+`script`::
+(object)
+Contains script statistics of the cluster.
++
+.Properties of `script`
+[%collapsible%open]
+======
+`compilations`::
+(integer)
+Total number of inline script compilations performed by the cluster.
+
+`compilations_history`::
+(object)
+Contains the recent history of script compilations.
+
+.Properties of `compilations_history`
+[%collapsible%open]
+=======
+`5m`::
+(long)
+The number of script compilations in the last five minutes.
+`15m`::
+(long)
+The number of script compilations in the last fifteen minutes.
+`24h`::
+(long)
+The number of script compilations in the last twenty-four hours.
+=======
+
+`cache_evictions`::
+(integer)
+Total number of times the script cache has evicted old data.
+
+
+`cache_evictions_history`::
+(object)
+Contains the recent history of script cache evictions.
+
+.Properties of `cache_evictions_history`
+[%collapsible%open]
+=======
+`5m`::
+(long)
+The number of script cache evictions in the last five minutes.
+`15m`::
+(long)
+The number of script cache evictions in the last fifteen minutes.
+`24h`::
+(long)
+The number of script cache evictions in the last twenty-four hours.
+=======
+
+`compilation_limit_triggered`::
+(integer)
+Total number of times the <<script-compilation-circuit-breaker,script
+compilation>> circuit breaker has limited inline script compilations.
+======
+
 [[cluster-info-api-example]]
 ==== {api-examples-title}
 
@@ -299,6 +361,9 @@ GET /_info/ingest
 # returns the thread_pool info of the cluster
 GET /_info/thread_pool
 
+# returns the script info of the cluster
+GET /_info/script
+
 # returns the http and ingest info of the cluster
 GET /_info/http,ingest
 ----

+ 0 - 2
docs/reference/search/search-your-data/knn-search.asciidoc

@@ -334,8 +334,6 @@ calculated on the combined set of `knn` and `query` matches.
 [[semantic-search]]
 ==== Perform semantic search
 
-experimental[]
-
 kNN search enables you to perform semantic search by using a previously deployed
 {ml-docs}/ml-nlp-search-compare.html#ml-nlp-text-embedding[text embedding model].
 Instead of literal matching on search terms, semantic search retrieves results

+ 72 - 72
gradle/verification-metadata.xml

@@ -2488,124 +2488,124 @@
             <sha256 value="015d5c229f3cd5c0ebf175c1da08d596d94043362ae9d92637d88848c90537c8" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-analysis-common" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-analysis-common-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="5c5a049f51a233d2a3f0c50bdb625b52905c1c537b3e38e040e6517bfa074b2b" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-analysis-common" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-analysis-common-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="dc54c36b1a6e51d6a39f70e760570d1aaa17558980b09a4aab06da64f932dd62" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-analysis-icu" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-analysis-icu-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="6e378b9a818699e6fcb3cdda4187a4672bfa47b2de288a3c8f258b243954f476" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-analysis-icu" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-analysis-icu-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="184899387f830c370e4c2814eb11cd4cd630f6257fab3db56fa21d55ac845adf" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-analysis-kuromoji" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-analysis-kuromoji-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="b05149a2feae54626e870a7829e8b00245f59be5985809e1c55830e5b2f219cc" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-analysis-kuromoji" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-analysis-kuromoji-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="6233a2a517a748522fe44683778dc0566f2d2a427203a092fdb842154b545662" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-analysis-morfologik" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-analysis-morfologik-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="aa340253882a997370ddb3a0350f800d48386e496e9362834dbe8dd2eb86e385" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-analysis-morfologik" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-analysis-morfologik-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="103359e987ecc2b3f119bdf891428bc6a54f4371cb730a6b6c83aaf73119eec5" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-analysis-nori" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-analysis-nori-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="88259a45357e7ae085cddd3da37352839d0bcb8b3b7f99acc7065859d851ff06" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-analysis-nori" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-analysis-nori-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="83ac1452b550571171d321adb1fad4e7da1a29d7408346825e3137bad84e12f5" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-analysis-phonetic" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-analysis-phonetic-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="2dae3e41c203e8049f792e9328f3d291ffed42a0c6dc422e92d0f4b785759194" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-analysis-phonetic" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-analysis-phonetic-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="5e09eefd06c708604994c142fd0a1ee52315d02aa318bf3cbc25771e247a8f67" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-analysis-smartcn" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-analysis-smartcn-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="67ca1d76a228ef9b3d7ecf4522c250f30567c016fb3e352df54d74bcffcf6e95" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-analysis-smartcn" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-analysis-smartcn-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="3c28f3d2daa5e467ee44efb1b345f2aacffc790b6af40ae577ccc941fcddc735" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-analysis-stempel" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-analysis-stempel-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="d3bafcd84a883451c24731da5144bf018fa819e909cb05776635a7ba11940826" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-analysis-stempel" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-analysis-stempel-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="38674e2413c6097a1557abb18ed88aa9fd41624ccecd9d301cdf4bacfd3bff97" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-backward-codecs" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-backward-codecs-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="4ae23e18f3ee52ae0eaceffc70daa62023fdc2cc37896408e68c0b5969235167" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-backward-codecs" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-backward-codecs-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="858c1f5bc5403d35f4783e3de50058d54ebce24eba13e61705c0b898ababc801" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-codecs" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-codecs-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="256d0c240e96d645d872de342cbcfda3a8ee55e4ee34312d4d2214cf0825f70e" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-codecs" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-codecs-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="c90eae3399f31f5b5b70590dbd4e2d444c772cb93a64414b9a56564efa424bb5" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-core" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-core-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="847a6870085fbe3ffb1b61291d1555c1a3b3e4f9f1fc84bb311527822134b93b" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-core" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-core-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="334596f8dd6e5d03624d8ac31a9f024c65358a50ca6810bbbaedc8841481ee46" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-expressions" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-expressions-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="31c06faa38ddd9f529f81b61c789f926c7f19f198ae6eaa13a84c1cc36f94530" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-expressions" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-expressions-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="b4048f71261d04ac795ca19e607e74e3c733912247b037e00cebcee9204c0df4" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-grouping" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-grouping-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="a5f24c0a842575faadb001ad6651fa59c611dc3cba70903d27243ebfc58c264f" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-grouping" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-grouping-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="e5710ec50aa5ea1414141c17fe10e87e4c5980d66890f17a61ddd6eaef37cf4c" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-highlighter" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-highlighter-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="96c1ceeae0ec2e3a5764f196695c56833b737716235fa51bd55c1fbc07621890" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-highlighter" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-highlighter-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="50d98c27c521cb40642e41903dc20bb0fae61159528b7d3d503726e2326c4496" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-join" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-join-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="792e44cf559ebd0c7173ec6947c0305643767504043c30086c17aeea94e3adc5" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-join" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-join-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="8174a381295deba79dca2bc70e11e895a558e04314db6ad7ffa4190fc450c67e" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-memory" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-memory-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="08b97f8a31b002d3805bcc7aa3bfd007c0c3c3a15637d85ed0c2661a39a1b8ed" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-memory" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-memory-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="687f3ac2d9586c3d5d1c1a12d296265a1e2f6fe62e6f3110eb63306b426d83c5" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-misc" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-misc-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="75efe101120e3f762ba7924373d489ca95c67cb976ef34967b599c1145cee3ec" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-misc" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-misc-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="cce3e0cb150c5b5223a3643749858b5093ee0ecbb410d52d25d5aed161f7e2c0" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-queries" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-queries-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="3396de8adf2932902c1f41a4c5c0ac60864d837d978e11a08387ecd05ae43533" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-queries" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-queries-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="53f64d898bd8563e4ff102de79fe0d1b4589a678f1155bf2aab98cb38a71385e" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-queryparser" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-queryparser-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="2c66472d5c93c2681f15f46e366dc2359e0054e3745e7ea10b2a2cb69996447a" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-queryparser" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-queryparser-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="7f6d2fbcc4522a9dcb57006cd753907a3439023add3a86586942c60e2b032b67" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-sandbox" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-sandbox-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="56986817350e5b415130316c794dcc16b73f9dd835af311c3a1d28d1f5e86895" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-sandbox" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-sandbox-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="c34508a739d481a787df484bfa056f7f02a4c037fee93ee89337f6fe639d01da" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-spatial-extras" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-spatial-extras-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="fbb2be4c0e10c12903ceb5cf6cc931c07c9f331d2f247ca975781235004a4693" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-spatial-extras" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-spatial-extras-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="30933d9265ec2bb1022cad1320a332be4e42289bffd83fdd8630539f84806387" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-spatial3d" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-spatial3d-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="16eb558e5120bcf9c609c8a8f5083176ed260f8af31366a1bbcb9521c78608fa" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-spatial3d" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-spatial3d-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="f85895acf2fefc38db65538ad11e0bba9e30a23dc36a5b3e630c9a5da6e799d4" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-suggest" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-suggest-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="74b766301bedd21579b473a6a65b280ae5408b7b8144adb5f29d2520494bd40e" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-suggest" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-suggest-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="0bc82ca849ead94f9b2f1981c6a2c6c3737172a3219e9425a4ca6baf3ba33632" origin="Generated by Gradle"/>
          </artifact>
       </component>
-      <component group="org.apache.lucene" name="lucene-test-framework" version="9.7.0-snapshot-24df30cca69">
-         <artifact name="lucene-test-framework-9.7.0-snapshot-24df30cca69.jar">
-            <sha256 value="0e93af73fc5bd1c836fbe45c2050161064c17df5d2e1c0b5b014280d7647deaa" origin="Generated by Gradle"/>
+      <component group="org.apache.lucene" name="lucene-test-framework" version="9.7.0-snapshot-41cd1f7a88c">
+         <artifact name="lucene-test-framework-9.7.0-snapshot-41cd1f7a88c.jar">
+            <sha256 value="d330f1971e52fca545da2dcb44e00e1702e2c47f8448cbce5f0230c1a7b085c5" origin="Generated by Gradle"/>
          </artifact>
       </component>
       <component group="org.apache.maven" name="maven-model" version="3.6.2">

+ 1 - 0
modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java

@@ -137,6 +137,7 @@ public class CommonAnalysisFactoryTests extends AnalysisFactoryTestCase {
         filters.put("germanstem", GermanStemTokenFilterFactory.class);
         // this filter is not exposed and should only be used internally
         filters.put("fixedshingle", Void.class);
+        filters.put("word2vecsynonym", Void.class); // not exposed
         return filters;
     }
 

+ 2 - 1
rest-api-spec/src/main/resources/rest-api-spec/api/cluster.info.json

@@ -23,7 +23,8 @@
                               "_all",
                               "http",
                               "ingest",
-                              "thread_pool"
+                              "thread_pool",
+                              "script"
                           ],
                           "description":"Limit the information returned to the specified target."
                       }

+ 1 - 0
rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml

@@ -15,6 +15,7 @@ setup:
   - is_true: http
   - is_true: ingest
   - is_true: thread_pool
+  - is_true: script
 
 ---
 "Cluster Info fails when mixing _all with other targets":

+ 16 - 0
rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml

@@ -0,0 +1,16 @@
+---
+"Cluster Script Info":
+  - skip:
+      version: " - 8.8.99"
+      reason: "/_info/script only available from v8.9"
+
+  - do:
+      cluster.info:
+        target: [ script ]
+
+  - is_true: cluster_name
+  - is_true: script
+
+  - gte: { script.compilations: 0 }
+  - gte: { script.cache_evictions: 0 }
+  - gte: { script.compilation_limit_triggered: 0 }

+ 18 - 7
server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java

@@ -58,7 +58,7 @@ class BulkPrimaryExecutionContext {
     private ItemProcessingState currentItemState;
     private DocWriteRequest<?> requestToExecute;
     private BulkItemResponse executionResult;
-    private int retryCounter;
+    private int updateRetryCounter;
 
     BulkPrimaryExecutionContext(BulkShardRequest request, IndexShard primary) {
         this.request = request;
@@ -84,7 +84,7 @@ class BulkPrimaryExecutionContext {
             : "moving to next but current item wasn't completed (state: " + currentItemState + ")";
         currentItemState = ItemProcessingState.INITIAL;
         currentIndex = findNextNonAborted(currentIndex + 1);
-        retryCounter = 0;
+        updateRetryCounter = 0;
         requestToExecute = null;
         executionResult = null;
         assert assertInvariants(ItemProcessingState.INITIAL);
@@ -105,9 +105,9 @@ class BulkPrimaryExecutionContext {
         return executionResult;
     }
 
-    /** returns the number of times the current operation has been retried */
-    public int getRetryCounter() {
-        return retryCounter;
+    /** returns the number of times the current update operation has been retried */
+    public int getUpdateRetryCounter() {
+        return updateRetryCounter;
     }
 
     /** returns true if the request needs to wait for a mapping update to arrive from the master */
@@ -178,8 +178,19 @@ class BulkPrimaryExecutionContext {
         assert assertInvariants(ItemProcessingState.WAIT_FOR_MAPPING_UPDATE);
     }
 
+    public void resetForUpdateRetry() {
+        assert assertInvariants(ItemProcessingState.EXECUTED);
+        updateRetryCounter++;
+        resetForExecutionRetry();
+    }
+
+    public void resetForMappingUpdateRetry() {
+        assert assertInvariants(ItemProcessingState.WAIT_FOR_MAPPING_UPDATE);
+        resetForExecutionRetry();
+    }
+
     /** resets the current item state, prepare for a new execution */
-    public void resetForExecutionForRetry() {
+    private void resetForExecutionRetry() {
         assert assertInvariants(ItemProcessingState.WAIT_FOR_MAPPING_UPDATE, ItemProcessingState.EXECUTED);
         currentItemState = ItemProcessingState.INITIAL;
         requestToExecute = null;
@@ -292,7 +303,7 @@ class BulkPrimaryExecutionContext {
         assert Arrays.asList(expectedCurrentState).contains(currentItemState)
             : "expected current state [" + currentItemState + "] to be one of " + Arrays.toString(expectedCurrentState);
         assert currentIndex >= 0 : currentIndex;
-        assert retryCounter >= 0 : retryCounter;
+        assert updateRetryCounter >= 0 : updateRetryCounter;
         switch (currentItemState) {
             case INITIAL:
                 assert requestToExecute == null : requestToExecute;

+ 3 - 3
server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java

@@ -385,7 +385,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
                         @Override
                         public void onResponse(Void v) {
                             assert context.requiresWaitingForMappingUpdate();
-                            context.resetForExecutionForRetry();
+                            context.resetForMappingUpdateRetry();
                         }
 
                         @Override
@@ -425,8 +425,8 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
         if (isUpdate
             && isFailed
             && isConflictException(executionResult.getFailure().getCause())
-            && context.getRetryCounter() < ((UpdateRequest) docWriteRequest).retryOnConflict()) {
-            context.resetForExecutionForRetry();
+            && context.getUpdateRetryCounter() < ((UpdateRequest) docWriteRequest).retryOnConflict()) {
+            context.resetForUpdateRetry();
             return;
         }
         final BulkItemResponse response;

+ 9 - 4
server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java

@@ -20,9 +20,9 @@ import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.routing.OperationRouting;
 import org.elasticsearch.cluster.routing.PlainShardIterator;
 import org.elasticsearch.cluster.routing.ShardIterator;
-import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -96,7 +96,10 @@ public class TransportGetAction extends TransportSingleShardAction<GetRequest, G
         if (iterator == null) {
             return null;
         }
-        return new PlainShardIterator(iterator.shardId(), iterator.getShardRoutings().stream().filter(ShardRouting::isSearchable).toList());
+        return new PlainShardIterator(
+            iterator.shardId(),
+            iterator.getShardRoutings().stream().filter(shardRouting -> OperationRouting.canSearchShard(shardRouting, state)).toList()
+        );
     }
 
     @Override
@@ -110,11 +113,13 @@ public class TransportGetAction extends TransportSingleShardAction<GetRequest, G
         IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
         IndexShard indexShard = indexService.getShard(shardId.id());
         if (indexShard.routingEntry().isPromotableToPrimary() == false) {
+            assert indexShard.indexSettings().isFastRefresh() == false
+                : "a search shard should not receive a TransportGetAction for an index with fast refresh";
             handleGetOnUnpromotableShard(request, indexShard, listener);
             return;
         }
-        assert DiscoveryNode.isStateless(clusterService.getSettings()) == false
-            : "A TransportGetAction should always be handled by a search shard in Stateless";
+        assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh()
+            : "in Stateless a promotable to primary shard can receive a TransportGetAction only if an index has the fast refresh setting";
         if (request.realtime()) { // we are not tied to a refresh cycle here anyway
             asyncGet(request, shardId, listener);
         } else {

+ 4 - 1
server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java

@@ -14,6 +14,7 @@ import org.apache.logging.log4j.core.LoggerContext;
 import org.apache.logging.log4j.core.config.Configurator;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.StringHelper;
+import org.apache.lucene.util.VectorUtil;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.support.SubscribableListener;
@@ -186,7 +187,9 @@ class Elasticsearch {
             ReferenceDocs.class,
             // The following classes use MethodHandles.lookup during initialization, load them now (before SM) to be sure they succeed
             AbstractRefCounted.class,
-            SubscribableListener.class
+            SubscribableListener.class,
+            // We eagerly initialize to work around log4j permissions & JDK-8309727
+            VectorUtil.class
         );
 
         // install SM after natives, shutdown hooks, etc.

+ 15 - 2
server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java

@@ -30,6 +30,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 
+import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING;
+
 public class OperationRouting {
 
     public static final Setting<Boolean> USE_ADAPTIVE_REPLICA_SELECTION_SETTING = Setting.boolSetting(
@@ -122,8 +124,11 @@ public class OperationRouting {
                 nodeCounts
             );
             if (iterator != null) {
-                var searchableShards = iterator.getShardRoutings().stream().filter(ShardRouting::isSearchable).toList();
-                set.add(new PlainShardIterator(iterator.shardId(), searchableShards));
+                var shardsThatCanHandleSearches = iterator.getShardRoutings()
+                    .stream()
+                    .filter(shardRouting -> canSearchShard(shardRouting, clusterState))
+                    .toList();
+                set.add(new PlainShardIterator(iterator.shardId(), shardsThatCanHandleSearches));
             }
         }
         return GroupShardsIterator.sortAndCreate(new ArrayList<>(set));
@@ -262,4 +267,12 @@ public class OperationRouting {
         IndexMetadata indexMetadata = indexMetadata(clusterState, index);
         return new ShardId(indexMetadata.getIndex(), IndexRouting.fromIndexMetadata(indexMetadata).getShard(id, routing));
     }
+
+    public static boolean canSearchShard(ShardRouting shardRouting, ClusterState clusterState) {
+        if (INDEX_FAST_REFRESH_SETTING.get(clusterState.metadata().index(shardRouting.index()).getSettings())) {
+            return shardRouting.isPromotableToPrimary();
+        } else {
+            return shardRouting.isSearchable();
+        }
+    }
 }

+ 10 - 1
server/src/main/java/org/elasticsearch/index/IndexSettings.java

@@ -638,6 +638,7 @@ public final class IndexSettings {
     private volatile Translog.Durability durability;
     private volatile TimeValue syncInterval;
     private volatile TimeValue refreshInterval;
+    private final boolean fastRefresh;
     private volatile ByteSizeValue flushThresholdSize;
     private volatile TimeValue flushThresholdAge;
     private volatile ByteSizeValue generationThresholdSize;
@@ -787,7 +788,8 @@ public final class IndexSettings {
         defaultFields = scopedSettings.get(DEFAULT_FIELD_SETTING);
         syncInterval = INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.get(settings);
         refreshInterval = scopedSettings.get(INDEX_REFRESH_INTERVAL_SETTING);
-        if (scopedSettings.get(INDEX_FAST_REFRESH_SETTING) && DiscoveryNode.isStateless(nodeSettings) == false) {
+        fastRefresh = scopedSettings.get(INDEX_FAST_REFRESH_SETTING);
+        if (fastRefresh && DiscoveryNode.isStateless(nodeSettings) == false) {
             throw new IllegalArgumentException(INDEX_FAST_REFRESH_SETTING.getKey() + " is allowed only in stateless");
         }
         flushThresholdSize = scopedSettings.get(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING);
@@ -1119,6 +1121,13 @@ public final class IndexSettings {
         return refreshInterval;
     }
 
+    /**
+     * Only intended for stateless.
+     */
+    public boolean isFastRefresh() {
+        return fastRefresh;
+    }
+
     /**
      * Returns the transaction log threshold size when to forcefully flush the index and clear the transaction log.
      */

+ 43 - 1
server/src/main/java/org/elasticsearch/index/engine/Engine.java

@@ -38,6 +38,7 @@ import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion;
 import org.elasticsearch.common.metrics.CounterMetric;
 import org.elasticsearch.common.util.concurrent.ReleasableLock;
+import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException;
 import org.elasticsearch.core.CheckedRunnable;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.Releasable;
@@ -77,6 +78,7 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.Condition;
@@ -1917,7 +1919,47 @@ public abstract class Engine implements Closeable {
      * @param translogRecoveryRunner the translog recovery runner
      * @param recoverUpToSeqNo       the upper bound, inclusive, of sequence number to be recovered
      */
-    public abstract Engine recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo) throws IOException;
+    // TODO move this blocking implementation into tests (adding a timeout) and make all the production usages fully async
+    public final void recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo) throws IOException {
+        final var future = new PlainActionFuture<Void>();
+        recoverFromTranslog(translogRecoveryRunner, recoverUpToSeqNo, future);
+        try {
+            future.get();
+        } catch (ExecutionException e) {
+            // This is a (temporary) adapter between the older synchronous (blocking) code and the newer (async) API. Callers expect
+            // exceptions to be thrown directly, but Future#get adds an ExecutionException wrapper which we must remove to preserve the
+            // expected exception semantics.
+            if (e.getCause() instanceof IOException ioException) {
+                throw ioException;
+            } else if (e.getCause() instanceof RuntimeException runtimeException) {
+                throw runtimeException;
+            } else {
+                // the old code was "throws IOException" so we shouldn't see any other exception types here
+                logger.error("checked non-IOException unexpectedly thrown", e);
+                assert false : e;
+                throw new UncategorizedExecutionException("recoverFromTranslog", e);
+            }
+        } catch (InterruptedException e) {
+            // We don't really use interrupts in this area so this is somewhat unexpected (unless perhaps we're shutting down), just treat
+            // it like any other exception.
+            Thread.currentThread().interrupt();
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Performs recovery from the transaction log up to {@code recoverUpToSeqNo} (inclusive).
+     * This operation will close the engine if the recovery fails.
+     *
+     * @param translogRecoveryRunner the translog recovery runner
+     * @param recoverUpToSeqNo       the upper bound, inclusive, of sequence number to be recovered
+     * @param listener               listener notified on completion of the recovery, whether successful or otherwise
+     */
+    public abstract void recoverFromTranslog(
+        TranslogRecoveryRunner translogRecoveryRunner,
+        long recoverUpToSeqNo,
+        ActionListener<Void> listener
+    );
 
     /**
      * Do not replay translog operations, but make the engine be ready.

+ 56 - 44
server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java

@@ -43,6 +43,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.action.support.SubscribableListener;
 import org.elasticsearch.cluster.metadata.DataStream;
 import org.elasticsearch.common.lucene.LoggerInfoStream;
 import org.elasticsearch.common.lucene.Lucene;
@@ -520,25 +521,24 @@ public class InternalEngine extends Engine {
     }
 
     @Override
-    public InternalEngine recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo) throws IOException {
-        try (ReleasableLock lock = readLock.acquire()) {
-            ensureOpen();
-            if (pendingTranslogRecovery.get() == false) {
-                throw new IllegalStateException("Engine has already been recovered");
-            }
-            try {
-                recoverFromTranslogInternal(translogRecoveryRunner, recoverUpToSeqNo);
-            } catch (Exception e) {
-                try {
-                    pendingTranslogRecovery.set(true); // just play safe and never allow commits on this see #ensureCanFlush
-                    failEngine("failed to recover from translog", e);
-                } catch (Exception inner) {
-                    e.addSuppressed(inner);
+    public void recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo, ActionListener<Void> listener) {
+        ActionListener.run(listener, l -> {
+            try (ReleasableLock lock = readLock.acquire()) {
+                ensureOpen();
+                if (pendingTranslogRecovery.get() == false) {
+                    throw new IllegalStateException("Engine has already been recovered");
                 }
-                throw e;
+                recoverFromTranslogInternal(translogRecoveryRunner, recoverUpToSeqNo, l.delegateResponse((ll, e) -> {
+                    try {
+                        pendingTranslogRecovery.set(true); // just play safe and never allow commits on this see #ensureCanFlush
+                        failEngine("failed to recover from translog", e);
+                    } catch (Exception inner) {
+                        e.addSuppressed(inner);
+                    }
+                    ll.onFailure(e);
+                }));
             }
-        }
-        return this;
+        });
     }
 
     @Override
@@ -547,33 +547,45 @@ public class InternalEngine extends Engine {
         pendingTranslogRecovery.set(false); // we are good - now we can commit
     }
 
-    private void recoverFromTranslogInternal(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo) throws IOException {
-        final int opsRecovered;
-        final long localCheckpoint = getProcessedLocalCheckpoint();
-        if (localCheckpoint < recoverUpToSeqNo) {
-            try (Translog.Snapshot snapshot = newTranslogSnapshot(localCheckpoint + 1, recoverUpToSeqNo)) {
-                opsRecovered = translogRecoveryRunner.run(this, snapshot);
-            } catch (Exception e) {
-                throw new EngineException(shardId, "failed to recover from translog", e);
-            }
-        } else {
-            opsRecovered = 0;
-        }
-        // flush if we recovered something or if we have references to older translogs
-        // note: if opsRecovered == 0 and we have older translogs it means they are corrupted or 0 length.
-        assert pendingTranslogRecovery.get() : "translogRecovery is not pending but should be";
-        pendingTranslogRecovery.set(false); // we are good - now we can commit
-        logger.trace(
-            () -> format(
-                "flushing post recovery from translog: ops recovered [%s], current translog generation [%s]",
-                opsRecovered,
-                translog.currentFileGeneration()
-            )
-        );
-        PlainActionFuture<FlushResult> future = PlainActionFuture.newFuture();
-        flush(false, true, future);
-        future.actionGet();
-        translog.trimUnreferencedReaders();
+    private void recoverFromTranslogInternal(
+        TranslogRecoveryRunner translogRecoveryRunner,
+        long recoverUpToSeqNo,
+        ActionListener<Void> listener
+    ) {
+        ActionListener.run(listener, l -> {
+            final int opsRecovered;
+            final long localCheckpoint = getProcessedLocalCheckpoint();
+            if (localCheckpoint < recoverUpToSeqNo) {
+                try (Translog.Snapshot snapshot = newTranslogSnapshot(localCheckpoint + 1, recoverUpToSeqNo)) {
+                    opsRecovered = translogRecoveryRunner.run(this, snapshot);
+                } catch (Exception e) {
+                    throw new EngineException(shardId, "failed to recover from translog", e);
+                }
+            } else {
+                opsRecovered = 0;
+            }
+            // flush if we recovered something or if we have references to older translogs
+            // note: if opsRecovered == 0 and we have older translogs it means they are corrupted or 0 length.
+            assert pendingTranslogRecovery.get() : "translogRecovery is not pending but should be";
+            pendingTranslogRecovery.set(false); // we are good - now we can commit
+            logger.trace(
+                () -> format(
+                    "flushing post recovery from translog: ops recovered [%s], current translog generation [%s]",
+                    opsRecovered,
+                    translog.currentFileGeneration()
+                )
+            );
+
+            // flush might do something async and complete the listener on a different thread, from which we must fork back to a generic
+            // thread to continue with recovery, but if it doesn't do anything async then there's no need to fork, hence why we use a
+            // SubscribableListener here
+            final var flushListener = new SubscribableListener<FlushResult>();
+            flush(false, true, flushListener);
+            flushListener.addListener(l.delegateFailureAndWrap((ll, r) -> {
+                translog.trimUnreferencedReaders();
+                ll.onResponse(null);
+            }), engineConfig.getThreadPool().generic(), null);
+        });
     }
 
     protected Translog.Snapshot newTranslogSnapshot(long fromSeqNo, long toSeqNo) throws IOException {

+ 15 - 9
server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java

@@ -518,16 +518,22 @@ public class ReadOnlyEngine extends Engine {
     }
 
     @Override
-    public Engine recoverFromTranslog(final TranslogRecoveryRunner translogRecoveryRunner, final long recoverUpToSeqNo) {
-        try (ReleasableLock lock = readLock.acquire()) {
-            ensureOpen();
-            try {
-                translogRecoveryRunner.run(this, Translog.Snapshot.EMPTY);
-            } catch (final Exception e) {
-                throw new EngineException(shardId, "failed to recover from empty translog snapshot", e);
+    public void recoverFromTranslog(
+        final TranslogRecoveryRunner translogRecoveryRunner,
+        final long recoverUpToSeqNo,
+        ActionListener<Void> listener
+    ) {
+        ActionListener.run(listener, l -> {
+            try (ReleasableLock lock = readLock.acquire()) {
+                ensureOpen();
+                try {
+                    translogRecoveryRunner.run(this, Translog.Snapshot.EMPTY);
+                } catch (final Exception e) {
+                    throw new EngineException(shardId, "failed to recover from empty translog snapshot", e);
+                }
             }
-        }
-        return this;
+            l.onResponse(null);
+        });
     }
 
     @Override

+ 1 - 0
server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java

@@ -21,6 +21,7 @@ import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.QueryBuilder;
+import org.apache.lucene.util.TermAndBoost;
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;

+ 1 - 0
server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java

@@ -28,6 +28,7 @@ import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.QueryBuilder;
+import org.apache.lucene.util.TermAndBoost;
 import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.io.stream.StreamInput;

+ 1 - 0
server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java

@@ -18,6 +18,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.TermAndBoost;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.query.AbstractQueryBuilder;

+ 12 - 4
server/src/main/java/org/elasticsearch/rest/action/info/RestClusterInfoAction.java

@@ -27,6 +27,7 @@ import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestCancellableNodeClient;
 import org.elasticsearch.rest.action.RestResponseListener;
+import org.elasticsearch.script.ScriptStats;
 import org.elasticsearch.threadpool.ThreadPoolStats;
 
 import java.io.IOException;
@@ -39,9 +40,10 @@ import java.util.function.Function;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
-import static org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest.Metric.HTTP;
-import static org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest.Metric.INGEST;
-import static org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest.Metric.THREAD_POOL;
+import static org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.HTTP;
+import static org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.INGEST;
+import static org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.SCRIPT;
+import static org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.THREAD_POOL;
 import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS;
 
 @ServerlessScope(Scope.PUBLIC)
@@ -61,7 +63,13 @@ public class RestClusterInfoAction extends BaseRestHandler {
         nodesStatsResponse -> nodesStatsResponse.getNodes()
             .stream()
             .map(NodeStats::getThreadPool)
-            .reduce(ThreadPoolStats.IDENTITY, ThreadPoolStats::merge)
+            .reduce(ThreadPoolStats.IDENTITY, ThreadPoolStats::merge),
+        //
+        SCRIPT.metricName(),
+        nodesStatsResponse -> nodesStatsResponse.getNodes()
+            .stream()
+            .map(NodeStats::getScriptStats)
+            .reduce(ScriptStats.IDENTITY, ScriptStats::merge)
     );
     static final Set<String> AVAILABLE_TARGETS = RESPONSE_MAPPER.keySet();
 

+ 12 - 0
server/src/main/java/org/elasticsearch/script/ScriptContextStats.java

@@ -80,6 +80,18 @@ public record ScriptContextStats(
         );
     }
 
+    public static ScriptContextStats merge(ScriptContextStats first, ScriptContextStats second) {
+        assert first.context.equals(second.context) : "To merge 2 ScriptContextStats both of them must have the same context.";
+        return new ScriptContextStats(
+            first.context,
+            first.compilations + second.compilations,
+            TimeSeries.merge(first.compilationsHistory, second.compilationsHistory),
+            first.cacheEvictions + second.cacheEvictions,
+            TimeSeries.merge(first.cacheEvictionsHistory, second.cacheEvictionsHistory),
+            first.compilationLimitTriggered + second.compilationLimitTriggered
+        );
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(context);

+ 26 - 0
server/src/main/java/org/elasticsearch/script/ScriptStats.java

@@ -19,6 +19,8 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.xcontent.ToXContent;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -51,6 +53,8 @@ public record ScriptStats(
     TimeSeries cacheEvictionsHistory
 ) implements Writeable, ChunkedToXContent {
 
+    public static final ScriptStats IDENTITY = new ScriptStats(0, 0, 0, new TimeSeries(0), new TimeSeries(0));
+
     public ScriptStats(
         long compilations,
         long cacheEvictions,
@@ -68,6 +72,28 @@ public record ScriptStats(
         );
     }
 
+    public static ScriptStats merge(ScriptStats first, ScriptStats second) {
+        var mergedScriptContextStats = List.<ScriptContextStats>of();
+
+        if (first.contextStats.isEmpty() == false || second.contextStats.isEmpty() == false) {
+            var mapToCollectMergedStats = new HashMap<String, ScriptContextStats>();
+
+            first.contextStats.forEach(cs -> mapToCollectMergedStats.merge(cs.context(), cs, ScriptContextStats::merge));
+            second.contextStats.forEach(cs -> mapToCollectMergedStats.merge(cs.context(), cs, ScriptContextStats::merge));
+
+            mergedScriptContextStats = new ArrayList<>(mapToCollectMergedStats.values());
+        }
+
+        return new ScriptStats(
+            mergedScriptContextStats,
+            first.compilations + second.compilations,
+            first.cacheEvictions + second.cacheEvictions,
+            first.compilationLimitTriggered + second.compilationLimitTriggered,
+            TimeSeries.merge(first.compilationsHistory, second.compilationsHistory),
+            TimeSeries.merge(first.cacheEvictionsHistory, second.cacheEvictionsHistory)
+        );
+    }
+
     public static ScriptStats read(List<ScriptContextStats> contextStats) {
         long compilations = 0;
         long cacheEvictions = 0;

+ 9 - 0
server/src/main/java/org/elasticsearch/script/TimeSeries.java

@@ -45,6 +45,15 @@ public class TimeSeries implements Writeable, ToXContentFragment {
         return new TimeSeries(fiveMinutes, fifteenMinutes, twentyFourHours, total);
     }
 
+    public static TimeSeries merge(TimeSeries first, TimeSeries second) {
+        return new TimeSeries(
+            first.fiveMinutes + second.fiveMinutes,
+            first.fifteenMinutes + second.fifteenMinutes,
+            first.twentyFourHours + second.twentyFourHours,
+            first.total + second.total
+        );
+    }
+
     public TimeSeries(StreamInput in) throws IOException {
         fiveMinutes = in.readVLong();
         fifteenMinutes = in.readVLong();

+ 6 - 11
server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java

@@ -230,18 +230,13 @@ public class SniffConnectionStrategy extends RemoteConnectionStrategy {
 
         if (seedNodesSuppliers.hasNext()) {
             final Consumer<Exception> onFailure = e -> {
-                if (isRetryableException(e)) {
-                    if (seedNodesSuppliers.hasNext()) {
-                        logger.debug(
-                            () -> format("fetching nodes from external cluster [%s] failed moving to next seed node", clusterAlias),
-                            e
-                        );
-                        collectRemoteNodes(seedNodesSuppliers, listener);
-                        return;
-                    }
+                if (isRetryableException(e) && seedNodesSuppliers.hasNext()) {
+                    logger.debug(() -> "fetching nodes from external cluster [" + clusterAlias + "] failed moving to next seed node", e);
+                    collectRemoteNodes(seedNodesSuppliers, listener);
+                } else {
+                    logger.warn(() -> "fetching nodes from external cluster [" + clusterAlias + "] failed", e);
+                    listener.onFailure(e);
                 }
-                logger.warn(() -> "fetching nodes from external cluster [" + clusterAlias + "] failed", e);
-                listener.onFailure(e);
             };
 
             final DiscoveryNode seedNode = seedNodesSuppliers.next().get();

+ 16 - 10
server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java

@@ -277,6 +277,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
         }, listener -> listener.onResponse(null), ASSERTING_DONE_LISTENER);
         assertTrue(context.isInitial());
         assertTrue(context.hasMoreOperationsToExecute());
+        assertThat(context.getUpdateRetryCounter(), equalTo(0));
 
         assertThat("mappings were \"updated\" once", updateCalled.get(), equalTo(1));
 
@@ -572,7 +573,9 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
 
     public void testUpdateRequestWithConflictFailure() throws Exception {
         IndexSettings indexSettings = new IndexSettings(indexMetadata(), Settings.EMPTY);
-        DocWriteRequest<UpdateRequest> writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value");
+        int retries = randomInt(4);
+        DocWriteRequest<UpdateRequest> writeRequest = new UpdateRequest("index", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value")
+            .retryOnConflict(retries);
         BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);
 
         IndexRequest updateResponse = new IndexRequest("index").id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value");
@@ -599,16 +602,19 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
         BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);
 
         randomlySetIgnoredPrimaryResponse(primaryRequest);
-
         BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
-        TransportShardBulkAction.executeBulkItemRequest(
-            context,
-            updateHelper,
-            threadPool::absoluteTimeInMillis,
-            new NoopMappingUpdatePerformer(),
-            listener -> listener.onResponse(null),
-            ASSERTING_DONE_LISTENER
-        );
+
+        for (int i = 0; i < retries + 1; i++) {
+            assertTrue(context.hasMoreOperationsToExecute());
+            TransportShardBulkAction.executeBulkItemRequest(
+                context,
+                updateHelper,
+                threadPool::absoluteTimeInMillis,
+                new NoopMappingUpdatePerformer(),
+                listener -> listener.onResponse(null),
+                ASSERTING_DONE_LISTENER
+            );
+        }
         assertFalse(context.hasMoreOperationsToExecute());
 
         assertNull(context.getLocationToSync());

+ 12 - 7
server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java

@@ -4182,15 +4182,18 @@ public class IndexShardTests extends IndexShardTestCase {
         CountDownLatch closeDoneLatch = new CountDownLatch(1);
         IndexShard shard = newStartedShard(false, Settings.EMPTY, config -> new InternalEngine(config) {
             @Override
-            public InternalEngine recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo)
-                throws IOException {
+            public void recoverFromTranslog(
+                TranslogRecoveryRunner translogRecoveryRunner,
+                long recoverUpToSeqNo,
+                ActionListener<Void> listener
+            ) {
                 readyToCloseLatch.countDown();
                 try {
                     closeDoneLatch.await();
                 } catch (InterruptedException e) {
                     throw new AssertionError(e);
                 }
-                return super.recoverFromTranslog(translogRecoveryRunner, recoverUpToSeqNo);
+                super.recoverFromTranslog(translogRecoveryRunner, recoverUpToSeqNo, listener);
             }
         });
 
@@ -4241,16 +4244,18 @@ public class IndexShardTests extends IndexShardTestCase {
         CountDownLatch snapshotDoneLatch = new CountDownLatch(1);
         IndexShard shard = newStartedShard(false, Settings.EMPTY, config -> new InternalEngine(config) {
             @Override
-            public InternalEngine recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo)
-                throws IOException {
-                InternalEngine internalEngine = super.recoverFromTranslog(translogRecoveryRunner, recoverUpToSeqNo);
+            public void recoverFromTranslog(
+                TranslogRecoveryRunner translogRecoveryRunner,
+                long recoverUpToSeqNo,
+                ActionListener<Void> listener
+            ) {
+                super.recoverFromTranslog(translogRecoveryRunner, recoverUpToSeqNo, listener);
                 readyToSnapshotLatch.countDown();
                 try {
                     snapshotDoneLatch.await();
                 } catch (InterruptedException e) {
                     throw new AssertionError(e);
                 }
-                return internalEngine;
             }
         });
 

+ 58 - 0
server/src/test/java/org/elasticsearch/script/ScriptContextStatsTests.java

@@ -0,0 +1,58 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.script;
+
+import org.elasticsearch.test.ESTestCase;
+
+import static org.elasticsearch.script.TimeSeriesTests.randomTimeseries;
+
+public class ScriptContextStatsTests extends ESTestCase {
+    public void testMerge() {
+        {
+            var first = randomScriptContextStats();
+            var second = randomScriptContextStats();
+
+            var e = expectThrows(AssertionError.class, () -> ScriptContextStats.merge(first, second));
+            assertEquals(e.getMessage(), "To merge 2 ScriptContextStats both of them must have the same context.");
+        }
+        {
+            var context = randomAlphaOfLength(30);
+            var first = randomScriptContextStats(context);
+            var second = randomScriptContextStats(context);
+
+            assertEquals(
+                ScriptContextStats.merge(first, second),
+                new ScriptContextStats(
+                    context,
+                    first.compilations() + second.compilations(),
+                    TimeSeries.merge(first.compilationsHistory(), second.compilationsHistory()),
+                    first.cacheEvictions() + second.cacheEvictions(),
+                    TimeSeries.merge(first.cacheEvictionsHistory(), second.cacheEvictionsHistory()),
+                    first.compilationLimitTriggered() + second.compilationLimitTriggered()
+                )
+            );
+        }
+    }
+
+    public static ScriptContextStats randomScriptContextStats() {
+        return randomScriptContextStats(randomAlphaOfLength(30));
+    }
+
+    public static ScriptContextStats randomScriptContextStats(String contextName) {
+        return new ScriptContextStats(
+            contextName,
+            randomLongBetween(0, 10000),
+            randomTimeseries(),
+            randomLongBetween(0, 10000),
+            randomTimeseries(),
+            randomLongBetween(0, 10000)
+        );
+    }
+
+}

+ 34 - 0
server/src/test/java/org/elasticsearch/script/ScriptStatsTests.java

@@ -22,6 +22,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.function.Function;
 
+import static org.elasticsearch.script.ScriptContextStatsTests.randomScriptContextStats;
+import static org.elasticsearch.script.TimeSeriesTests.randomTimeseries;
 import static org.hamcrest.Matchers.equalTo;
 
 public class ScriptStatsTests extends ESTestCase {
@@ -166,6 +168,38 @@ public class ScriptStatsTests extends ESTestCase {
         assertEquals(stats, deserStats);
     }
 
+    public void testMerge() {
+        var first = randomScriptStats();
+        var second = randomScriptStats();
+
+        assertEquals(
+            ScriptStats.merge(first, second),
+            new ScriptStats(
+                List.of(
+                    ScriptContextStats.merge(first.contextStats().get(0), second.contextStats().get(0)),
+                    ScriptContextStats.merge(first.contextStats().get(1), second.contextStats().get(1)),
+                    ScriptContextStats.merge(first.contextStats().get(2), second.contextStats().get(2))
+                ),
+                first.compilations() + second.compilations(),
+                first.cacheEvictions() + second.cacheEvictions(),
+                first.compilationLimitTriggered() + second.compilationLimitTriggered(),
+                TimeSeries.merge(first.compilationsHistory(), second.compilationsHistory()),
+                TimeSeries.merge(first.cacheEvictionsHistory(), second.cacheEvictionsHistory())
+            )
+        );
+    }
+
+    public static ScriptStats randomScriptStats() {
+        return new ScriptStats(
+            List.of(randomScriptContextStats("context-a"), randomScriptContextStats("context-b"), randomScriptContextStats("context-c")),
+            randomLongBetween(0, 10000),
+            randomLongBetween(0, 10000),
+            randomLongBetween(0, 10000),
+            randomTimeseries(),
+            randomTimeseries()
+        );
+    }
+
     public ScriptContextStats serDeser(TransportVersion outVersion, TransportVersion inVersion, ScriptContextStats stats)
         throws IOException {
         try (BytesStreamOutput out = new BytesStreamOutput()) {

+ 37 - 0
server/src/test/java/org/elasticsearch/script/TimeSeriesTests.java

@@ -0,0 +1,37 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.script;
+
+import org.elasticsearch.test.ESTestCase;
+
+public class TimeSeriesTests extends ESTestCase {
+    public void testMerge() {
+        var first = randomTimeseries();
+        var second = randomTimeseries();
+
+        assertEquals(
+            TimeSeries.merge(first, second),
+            new TimeSeries(
+                first.fiveMinutes + second.fiveMinutes,
+                first.fifteenMinutes + second.fifteenMinutes,
+                first.twentyFourHours + second.twentyFourHours,
+                first.total + second.total
+            )
+        );
+    }
+
+    static TimeSeries randomTimeseries() {
+        return new TimeSeries(
+            randomLongBetween(0, 10000),
+            randomLongBetween(0, 10000),
+            randomLongBetween(0, 10000),
+            randomLongBetween(0, 10000)
+        );
+    }
+}

+ 3 - 1
test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java

@@ -195,7 +195,9 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase {
         entry("spanishpluralstem", Void.class),
         // LUCENE-10352
         entry("daitchmokotoffsoundex", Void.class),
-        entry("persianstem", Void.class)
+        entry("persianstem", Void.class),
+        // not exposed
+        entry("word2vecsynonym", Void.class)
     );
 
     static final Map<String, Class<?>> KNOWN_CHARFILTERS = Map.of(

+ 30 - 0
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java

@@ -31,6 +31,8 @@ import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults;
 import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults;
 import org.elasticsearch.xpack.core.ml.inference.results.TextSimilarityInferenceResults;
 import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertJapaneseTokenization;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertJapaneseTokenizationUpdate;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenizationUpdate;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig;
@@ -504,6 +506,13 @@ public class MlInferenceNamedXContentProvider implements NamedXContentProvider {
         );
 
         // Tokenization
+        namedXContent.add(
+            new NamedXContentRegistry.Entry(
+                Tokenization.class,
+                BertJapaneseTokenization.NAME,
+                (p, c) -> BertJapaneseTokenization.fromXContent(p, (boolean) c)
+            )
+        );
         namedXContent.add(
             new NamedXContentRegistry.Entry(
                 Tokenization.class,
@@ -526,6 +535,13 @@ public class MlInferenceNamedXContentProvider implements NamedXContentProvider {
             )
         );
 
+        namedXContent.add(
+            new NamedXContentRegistry.Entry(
+                TokenizationUpdate.class,
+                BertJapaneseTokenizationUpdate.NAME,
+                (p, c) -> BertJapaneseTokenizationUpdate.fromXContent(p)
+            )
+        );
         namedXContent.add(
             new NamedXContentRegistry.Entry(
                 TokenizationUpdate.class,
@@ -721,6 +737,13 @@ public class MlInferenceNamedXContentProvider implements NamedXContentProvider {
         );
 
         // Tokenization
+        namedWriteables.add(
+            new NamedWriteableRegistry.Entry(
+                Tokenization.class,
+                BertJapaneseTokenization.NAME.getPreferredName(),
+                BertJapaneseTokenization::new
+            )
+        );
         namedWriteables.add(
             new NamedWriteableRegistry.Entry(Tokenization.class, BertTokenization.NAME.getPreferredName(), BertTokenization::new)
         );
@@ -729,6 +752,13 @@ public class MlInferenceNamedXContentProvider implements NamedXContentProvider {
         );
         namedWriteables.add(new NamedWriteableRegistry.Entry(Tokenization.class, RobertaTokenization.NAME, RobertaTokenization::new));
 
+        namedWriteables.add(
+            new NamedWriteableRegistry.Entry(
+                TokenizationUpdate.class,
+                BertJapaneseTokenizationUpdate.NAME.getPreferredName(),
+                BertJapaneseTokenizationUpdate::new
+            )
+        );
         namedWriteables.add(
             new NamedWriteableRegistry.Entry(
                 TokenizationUpdate.class,

+ 73 - 0
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenization.java

@@ -0,0 +1,73 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+
+public class BertJapaneseTokenization extends Tokenization {
+
+    public static final ParseField NAME = new ParseField("bert_ja");
+
+    public static ConstructingObjectParser<BertJapaneseTokenization, Void> createJpParser(boolean ignoreUnknownFields) {
+        ConstructingObjectParser<BertJapaneseTokenization, Void> parser = new ConstructingObjectParser<>(
+            "bert_japanese_tokenization",
+            ignoreUnknownFields,
+            a -> new BertJapaneseTokenization(
+                (Boolean) a[0],
+                (Boolean) a[1],
+                (Integer) a[2],
+                a[3] == null ? null : Truncate.fromString((String) a[3]),
+                (Integer) a[4]
+            )
+        );
+        Tokenization.declareCommonFields(parser);
+        return parser;
+    }
+
+    private static final ConstructingObjectParser<BertJapaneseTokenization, Void> JP_LENIENT_PARSER = createJpParser(true);
+    private static final ConstructingObjectParser<BertJapaneseTokenization, Void> JP_STRICT_PARSER = createJpParser(false);
+
+    public static BertJapaneseTokenization fromXContent(XContentParser parser, boolean lenient) {
+        return lenient ? JP_LENIENT_PARSER.apply(parser, null) : JP_STRICT_PARSER.apply(parser, null);
+    }
+
+    public BertJapaneseTokenization(
+        @Nullable Boolean doLowerCase,
+        @Nullable Boolean withSpecialTokens,
+        @Nullable Integer maxSequenceLength,
+        @Nullable Truncate truncate,
+        @Nullable Integer span
+    ) {
+        super(doLowerCase, withSpecialTokens, maxSequenceLength, truncate, span);
+    }
+
+    public BertJapaneseTokenization(StreamInput in) throws IOException {
+        super(in);
+    }
+
+    XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
+        return builder;
+    }
+
+    @Override
+    public String getWriteableName() {
+        return BertJapaneseTokenization.NAME.getPreferredName();
+    }
+
+    @Override
+    public String getName() {
+        return BertJapaneseTokenization.NAME.getPreferredName();
+    }
+}

+ 92 - 0
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationUpdate.java

@@ -0,0 +1,92 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
+
+import java.io.IOException;
+import java.util.Optional;
+
+public class BertJapaneseTokenizationUpdate extends AbstractTokenizationUpdate {
+
+    public static final ParseField NAME = BertJapaneseTokenization.NAME;
+
+    public static ConstructingObjectParser<BertJapaneseTokenizationUpdate, Void> PARSER = new ConstructingObjectParser<>(
+        "bert_japanese_tokenization_update",
+        a -> new BertJapaneseTokenizationUpdate(a[0] == null ? null : Tokenization.Truncate.fromString((String) a[0]), (Integer) a[1])
+    );
+
+    static {
+        declareCommonParserFields(PARSER);
+    }
+
+    public static BertJapaneseTokenizationUpdate fromXContent(XContentParser parser) {
+        return PARSER.apply(parser, null);
+    }
+
+    public BertJapaneseTokenizationUpdate(@Nullable Tokenization.Truncate truncate, @Nullable Integer span) {
+        super(truncate, span);
+    }
+
+    public BertJapaneseTokenizationUpdate(StreamInput in) throws IOException {
+        super(in);
+    }
+
+    @Override
+    public Tokenization apply(Tokenization originalConfig) {
+        if (originalConfig instanceof BertJapaneseTokenization == false) {
+            throw ExceptionsHelper.badRequestException(
+                "Tokenization config of type [{}] can not be updated with a request of type [{}]",
+                originalConfig.getName(),
+                getName()
+            );
+        }
+
+        Tokenization.validateSpanAndTruncate(getTruncate(), getSpan());
+
+        if (isNoop()) {
+            return originalConfig;
+        }
+
+        if (getTruncate() != null && getTruncate().isInCompatibleWithSpan() == false) {
+            // When truncate value is incompatible with span wipe out
+            // the existing span setting to avoid an invalid combination of settings.
+            // This avoids the user having to set span to the special unset value
+            return new BertJapaneseTokenization(
+                originalConfig.doLowerCase(),
+                originalConfig.withSpecialTokens(),
+                originalConfig.maxSequenceLength(),
+                getTruncate(),
+                null
+            );
+        }
+
+        return new BertJapaneseTokenization(
+            originalConfig.doLowerCase(),
+            originalConfig.withSpecialTokens(),
+            originalConfig.maxSequenceLength(),
+            Optional.ofNullable(getTruncate()).orElse(originalConfig.getTruncate()),
+            Optional.ofNullable(getSpan()).orElse(originalConfig.getSpan())
+        );
+    }
+
+    @Override
+    public String getWriteableName() {
+        return BertJapaneseTokenization.NAME.getPreferredName();
+    }
+
+    @Override
+    public String getName() {
+        return BertJapaneseTokenization.NAME.getPreferredName();
+    }
+}

+ 2 - 0
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NlpConfigUpdate.java

@@ -35,6 +35,8 @@ public abstract class NlpConfigUpdate implements InferenceConfigUpdate, NamedXCo
         Map<String, BiFunction<Tokenization.Truncate, Integer, TokenizationUpdate>> knownTokenizers = Map.of(
             BertTokenization.NAME.getPreferredName(),
             BertTokenizationUpdate::new,
+            BertJapaneseTokenization.NAME.getPreferredName(),
+            BertJapaneseTokenizationUpdate::new,
             MPNetTokenization.NAME.getPreferredName(),
             MPNetTokenizationUpdate::new,
             RobertaTokenizationUpdate.NAME.getPreferredName(),

+ 8 - 1
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java

@@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
 import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
 import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver;
 import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege;
+import org.elasticsearch.xpack.core.security.authz.restriction.WorkflowResolver;
 import org.elasticsearch.xpack.core.security.support.MetadataUtils;
 
 import java.util.Arrays;
@@ -86,7 +87,13 @@ public class RoleDescriptorRequestValidator {
             );
         }
         if (roleDescriptor.hasWorkflowsRestriction()) {
-            // TODO: Validate workflow names here!
+            for (String workflowName : roleDescriptor.getRestriction().getWorkflows()) {
+                try {
+                    WorkflowResolver.resolveWorkflowByName(workflowName);
+                } catch (IllegalArgumentException e) {
+                    validationException = addValidationError(e.getMessage(), validationException);
+                }
+            }
         }
         return validationException;
     }

+ 17 - 5
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java

@@ -500,19 +500,20 @@ public abstract class IndexTemplateRegistry implements ClusterStateListener {
     }
 
     private void addIndexLifecyclePoliciesIfMissing(ClusterState state) {
-        Optional<IndexLifecycleMetadata> maybeMeta = Optional.ofNullable(state.metadata().custom(IndexLifecycleMetadata.TYPE));
+        IndexLifecycleMetadata metadata = state.metadata().custom(IndexLifecycleMetadata.TYPE);
         for (LifecyclePolicy policy : getPolicyConfigs()) {
             final AtomicBoolean creationCheck = policyCreationsInProgress.computeIfAbsent(
                 policy.getName(),
                 key -> new AtomicBoolean(false)
             );
             if (creationCheck.compareAndSet(false, true)) {
-                final boolean policyNeedsToBeCreated = maybeMeta.flatMap(
-                    ilmMeta -> Optional.ofNullable(ilmMeta.getPolicies().get(policy.getName()))
-                ).isPresent() == false;
-                if (policyNeedsToBeCreated) {
+                final LifecyclePolicy currentPolicy = metadata != null ? metadata.getPolicies().get(policy.getName()) : null;
+                if (Objects.isNull(currentPolicy)) {
                     logger.debug("adding lifecycle policy [{}] for [{}], because it doesn't exist", policy.getName(), getOrigin());
                     putPolicy(policy, creationCheck);
+                } else if (isUpgradeRequired(currentPolicy, policy)) {
+                    logger.info("upgrading lifecycle policy [{}] for [{}]", policy.getName(), getOrigin());
+                    putPolicy(policy, creationCheck);
                 } else {
                     logger.trace("not adding lifecycle policy [{}] for [{}], because it already exists", policy.getName(), getOrigin());
                     creationCheck.set(false);
@@ -521,6 +522,17 @@ public abstract class IndexTemplateRegistry implements ClusterStateListener {
         }
     }
 
+    /**
+     * Determines whether an index lifecycle policy should be upgraded to a newer version.
+     *
+     * @param currentPolicy The current lifecycle policy. Never null.
+     * @param newPolicy The new lifecycle policy. Never null.
+     * @return <code>true</code> if <code>newPolicy</code> should replace <code>currentPolicy</code>.
+     */
+    protected boolean isUpgradeRequired(LifecyclePolicy currentPolicy, LifecyclePolicy newPolicy) {
+        return false;
+    }
+
     private void putPolicy(final LifecyclePolicy policy, final AtomicBoolean creationCheck) {
         final Executor executor = threadPool.generic();
         executor.execute(() -> {

+ 2 - 1
x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/ilm-policy/profiling-60-days.json

@@ -33,6 +33,7 @@
   },
   "_meta": {
     "description": "default policy for Elastic Universal Profiling",
-    "managed": true
+    "managed": true,
+    "version": ${xpack.profiling.template.version}
   }
 }

+ 6 - 2
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/AbstractNlpConfigUpdateTestCase.java

@@ -62,17 +62,21 @@ abstract class AbstractNlpConfigUpdateTestCase<T extends NlpConfigUpdate> extend
             final String tokenizationKind;
             final TokenizationUpdate update;
             final Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values());
-            int testCase = randomInt(2);
+            int testCase = randomInt(3);
             switch (testCase) {
                 case 0 -> {
                     tokenizationKind = "bert";
                     update = new BertTokenizationUpdate(truncate, null);
                 }
                 case 1 -> {
+                    tokenizationKind = "bert_ja";
+                    update = new BertJapaneseTokenizationUpdate(truncate, null);
+                }
+                case 2 -> {
                     tokenizationKind = "mpnet";
                     update = new MPNetTokenizationUpdate(truncate, null);
                 }
-                case 2 -> {
+                case 3 -> {
                     tokenizationKind = "roberta";
                     update = new RobertaTokenizationUpdate(truncate, null);
                 }

+ 86 - 0
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationTests.java

@@ -0,0 +1,86 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase;
+import org.junit.Before;
+
+import java.io.IOException;
+
+public class BertJapaneseTokenizationTests extends AbstractBWCSerializationTestCase<BertJapaneseTokenization> {
+
+    private boolean lenient;
+
+    public static BertJapaneseTokenization mutateForVersion(BertJapaneseTokenization instance, TransportVersion version) {
+        if (version.before(TransportVersion.V_8_2_0)) {
+            return new BertJapaneseTokenization(
+                instance.doLowerCase,
+                instance.withSpecialTokens,
+                instance.maxSequenceLength,
+                instance.truncate,
+                null
+            );
+        }
+        return instance;
+    }
+
+    @Before
+    public void chooseStrictOrLenient() {
+        lenient = randomBoolean();
+    }
+
+    @Override
+    protected BertJapaneseTokenization doParseInstance(XContentParser parser) throws IOException {
+        return BertJapaneseTokenization.createJpParser(lenient).apply(parser, null);
+    }
+
+    @Override
+    protected Writeable.Reader<BertJapaneseTokenization> instanceReader() {
+        return BertJapaneseTokenization::new;
+    }
+
+    @Override
+    protected BertJapaneseTokenization createTestInstance() {
+        return createRandom();
+    }
+
+    @Override
+    protected BertJapaneseTokenization mutateInstance(BertJapaneseTokenization instance) {
+        return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
+    }
+
+    @Override
+    protected BertJapaneseTokenization mutateInstanceForVersion(BertJapaneseTokenization instance, TransportVersion version) {
+        return mutateForVersion(instance, version);
+    }
+
+    public static BertJapaneseTokenization createRandom() {
+        return new BertJapaneseTokenization(
+            randomBoolean() ? null : randomBoolean(),
+            randomBoolean() ? null : randomBoolean(),
+            randomBoolean() ? null : randomIntBetween(1, 1024),
+            randomBoolean() ? null : randomFrom(Tokenization.Truncate.values()),
+            null
+        );
+    }
+
+    public static BertJapaneseTokenization createRandomWithSpan() {
+        Tokenization.Truncate truncate = randomBoolean() ? null : randomFrom(Tokenization.Truncate.values());
+        Integer maxSeq = randomBoolean() ? null : randomIntBetween(1, 1024);
+        return new BertJapaneseTokenization(
+            randomBoolean() ? null : randomBoolean(),
+            randomBoolean() ? null : randomBoolean(),
+            maxSeq,
+            truncate,
+            Tokenization.Truncate.NONE.equals(truncate) && randomBoolean() ? randomIntBetween(0, maxSeq != null ? maxSeq - 1 : 100) : null
+        );
+    }
+}

+ 83 - 0
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/BertJapaneseTokenizationUpdateTests.java

@@ -0,0 +1,83 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase;
+
+import static org.hamcrest.Matchers.sameInstance;
+
+public class BertJapaneseTokenizationUpdateTests extends AbstractBWCWireSerializationTestCase<BertJapaneseTokenizationUpdate> {
+
+    public static BertJapaneseTokenizationUpdate randomInstance() {
+        Integer span = randomBoolean() ? null : randomIntBetween(8, 128);
+        Tokenization.Truncate truncate = randomBoolean() ? null : randomFrom(Tokenization.Truncate.values());
+
+        if (truncate != Tokenization.Truncate.NONE) {
+            span = null;
+        }
+        return new BertJapaneseTokenizationUpdate(truncate, span);
+    }
+
+    public void testApply() {
+        expectThrows(
+            IllegalArgumentException.class,
+            () -> new BertJapaneseTokenizationUpdate(Tokenization.Truncate.SECOND, 100).apply(BertJapaneseTokenizationTests.createRandom())
+        );
+
+        var updatedSpan = new BertJapaneseTokenizationUpdate(null, 100).apply(
+            new BertJapaneseTokenization(false, false, 512, Tokenization.Truncate.NONE, 50)
+        );
+        assertEquals(new BertJapaneseTokenization(false, false, 512, Tokenization.Truncate.NONE, 100), updatedSpan);
+
+        var updatedTruncate = new BertJapaneseTokenizationUpdate(Tokenization.Truncate.FIRST, null).apply(
+            new BertJapaneseTokenization(true, true, 512, Tokenization.Truncate.SECOND, null)
+        );
+        assertEquals(new BertJapaneseTokenization(true, true, 512, Tokenization.Truncate.FIRST, null), updatedTruncate);
+
+        var updatedNone = new BertJapaneseTokenizationUpdate(Tokenization.Truncate.NONE, null).apply(
+            new BertJapaneseTokenization(true, true, 512, Tokenization.Truncate.SECOND, null)
+        );
+        assertEquals(new BertJapaneseTokenization(true, true, 512, Tokenization.Truncate.NONE, null), updatedNone);
+
+        var unmodified = new BertJapaneseTokenization(true, true, 512, Tokenization.Truncate.NONE, null);
+        assertThat(new BertJapaneseTokenizationUpdate(null, null).apply(unmodified), sameInstance(unmodified));
+    }
+
+    public void testNoop() {
+        assertTrue(new BertJapaneseTokenizationUpdate(null, null).isNoop());
+        assertFalse(new BertJapaneseTokenizationUpdate(Tokenization.Truncate.SECOND, null).isNoop());
+        assertFalse(new BertJapaneseTokenizationUpdate(null, 10).isNoop());
+        assertFalse(new BertJapaneseTokenizationUpdate(Tokenization.Truncate.NONE, 10).isNoop());
+    }
+
+    @Override
+    protected Writeable.Reader<BertJapaneseTokenizationUpdate> instanceReader() {
+        return BertJapaneseTokenizationUpdate::new;
+    }
+
+    @Override
+    protected BertJapaneseTokenizationUpdate createTestInstance() {
+        return randomInstance();
+    }
+
+    @Override
+    protected BertJapaneseTokenizationUpdate mutateInstance(BertJapaneseTokenizationUpdate instance) {
+        return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
+    }
+
+    @Override
+    protected BertJapaneseTokenizationUpdate mutateInstanceForVersion(BertJapaneseTokenizationUpdate instance, TransportVersion version) {
+        if (version.before(TransportVersion.V_8_2_0)) {
+            return new BertJapaneseTokenizationUpdate(instance.getTruncate(), null);
+        }
+
+        return instance;
+    }
+}

+ 2 - 0
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigTestScaffolding.java

@@ -58,6 +58,8 @@ public final class InferenceConfigTestScaffolding {
             return new MPNetTokenizationUpdate(truncate, span);
         } else if (tokenization instanceof RobertaTokenization) {
             return new RobertaTokenizationUpdate(truncate, span);
+        } else if (tokenization instanceof BertJapaneseTokenization) {
+            return new BertJapaneseTokenizationUpdate(truncate, span);
         } else if (tokenization instanceof BertTokenization) {
             return new BertTokenizationUpdate(truncate, span);
         }

+ 4 - 1
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NlpConfigUpdateTests.java

@@ -71,7 +71,10 @@ public class NlpConfigUpdateTests extends ESTestCase {
             ElasticsearchStatusException.class,
             () -> NlpConfigUpdate.tokenizationFromMap(finalConfig)
         );
-        assertThat(e.getMessage(), containsString("unknown tokenization type expecting one of [bert, mpnet, roberta] got [not_bert]"));
+        assertThat(
+            e.getMessage(),
+            containsString("unknown tokenization type expecting one of [bert, bert_ja, mpnet, roberta] got [not_bert]")
+        );
     }
 
     public void testTokenizationFromMap_MpNet() {

+ 7 - 1
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java

@@ -80,6 +80,7 @@ public class BulkUpdateApiKeyRequestTests extends ESTestCase {
     }
 
     public void testRoleDescriptorValidation() {
+        final String[] unknownWorkflows = randomArray(1, 2, String[]::new, () -> randomAlphaOfLengthBetween(4, 10));
         final var request = new BulkUpdateApiKeyRequest(
             randomList(1, 5, () -> randomAlphaOfLength(10)),
             List.of(
@@ -97,7 +98,9 @@ public class BulkUpdateApiKeyRequestTests extends ESTestCase {
                     null,
                     null,
                     Map.of("_key", "value"),
-                    null
+                    null,
+                    null,
+                    new RoleDescriptor.Restriction(unknownWorkflows)
                 )
             ),
             null
@@ -109,5 +112,8 @@ public class BulkUpdateApiKeyRequestTests extends ESTestCase {
         assertThat(ve.validationErrors().get(2), containsStringIgnoringCase("application name"));
         assertThat(ve.validationErrors().get(3), containsStringIgnoringCase("Application privilege names"));
         assertThat(ve.validationErrors().get(4), containsStringIgnoringCase("role descriptor metadata keys may not start with "));
+        for (int i = 0; i < unknownWorkflows.length; i++) {
+            assertThat(ve.validationErrors().get(5 + i), containsStringIgnoringCase("unknown workflow [" + unknownWorkflows[i] + "]"));
+        }
     }
 }

+ 7 - 1
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java

@@ -87,6 +87,7 @@ public class CreateApiKeyRequestTests extends ESTestCase {
     }
 
     public void testRoleDescriptorValidation() {
+        final String[] unknownWorkflows = randomArray(1, 2, String[]::new, () -> randomAlphaOfLengthBetween(4, 10));
         final CreateApiKeyRequest request1 = new CreateApiKeyRequest(
             randomAlphaOfLength(5),
             List.of(
@@ -104,7 +105,9 @@ public class CreateApiKeyRequestTests extends ESTestCase {
                     null,
                     null,
                     Map.of("_key", "value"),
-                    null
+                    null,
+                    null,
+                    new RoleDescriptor.Restriction(unknownWorkflows)
                 )
             ),
             null
@@ -116,6 +119,9 @@ public class CreateApiKeyRequestTests extends ESTestCase {
         assertThat(ve1.validationErrors().get(2), containsStringIgnoringCase("application name"));
         assertThat(ve1.validationErrors().get(3), containsStringIgnoringCase("Application privilege names"));
         assertThat(ve1.validationErrors().get(4), containsStringIgnoringCase("role descriptor metadata keys may not start with "));
+        for (int i = 0; i < unknownWorkflows.length; i++) {
+            assertThat(ve1.validationErrors().get(5 + i), containsStringIgnoringCase("unknown workflow [" + unknownWorkflows[i] + "]"));
+        }
     }
 
     public void testSerialization() throws IOException {

+ 11 - 1
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java

@@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+import org.elasticsearch.xpack.core.security.authz.restriction.WorkflowResolver;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -71,6 +72,10 @@ public class UpdateApiKeyRequestTests extends ESTestCase {
     }
 
     public void testRoleDescriptorValidation() {
+        final List<String> unknownWorkflows = randomList(1, 2, () -> randomAlphaOfLengthBetween(4, 10));
+        final List<String> workflows = new ArrayList<>(unknownWorkflows.size() + 1);
+        workflows.addAll(unknownWorkflows);
+        workflows.add(WorkflowResolver.SEARCH_APPLICATION_QUERY_WORKFLOW.name());
         final var request1 = new UpdateApiKeyRequest(
             randomAlphaOfLength(10),
             List.of(
@@ -88,7 +93,9 @@ public class UpdateApiKeyRequestTests extends ESTestCase {
                     null,
                     null,
                     Map.of("_key", "value"),
-                    null
+                    null,
+                    null,
+                    new RoleDescriptor.Restriction(workflows.toArray(String[]::new))
                 )
             ),
             null
@@ -100,5 +107,8 @@ public class UpdateApiKeyRequestTests extends ESTestCase {
         assertThat(ve1.validationErrors().get(2), containsStringIgnoringCase("application name"));
         assertThat(ve1.validationErrors().get(3), containsStringIgnoringCase("Application privilege names"));
         assertThat(ve1.validationErrors().get(4), containsStringIgnoringCase("role descriptor metadata keys may not start with "));
+        for (int i = 0; i < unknownWorkflows.size(); i++) {
+            assertThat(ve1.validationErrors().get(5 + i), containsStringIgnoringCase("unknown workflow [" + unknownWorkflows.get(i) + "]"));
+        }
     }
 }

+ 209 - 0
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java

@@ -39,14 +39,21 @@ import org.elasticsearch.test.client.NoOpClient;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
 import org.elasticsearch.xcontent.XContentType;
+import org.elasticsearch.xpack.core.ilm.DeleteAction;
 import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata;
+import org.elasticsearch.xpack.core.ilm.LifecycleAction;
 import org.elasticsearch.xpack.core.ilm.LifecyclePolicy;
 import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata;
 import org.elasticsearch.xpack.core.ilm.OperationMode;
+import org.elasticsearch.xpack.core.ilm.action.PutLifecycleAction;
 import org.junit.After;
 import org.junit.Before;
 
+import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -92,6 +99,9 @@ public class IndexTemplateRegistryTests extends ESTestCase {
             if (action instanceof PutPipelineAction) {
                 assertPutPipelineAction(calledTimes, action, request, listener, "custom-plugin-final_pipeline");
                 return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                // ignore lifecycle policies in this case
+                return AcknowledgedResponse.TRUE;
             } else {
                 // the composable template is not expected to be added, as its dependency is not available in the cluster state
                 // custom-plugin-settings.json is not expected to be added as it contains a dependency on the default_pipeline
@@ -114,6 +124,9 @@ public class IndexTemplateRegistryTests extends ESTestCase {
             if (action instanceof PutPipelineAction) {
                 assertPutPipelineAction(calledTimes, action, request, listener, "custom-plugin-default_pipeline");
                 return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                // ignore lifecycle policies in this case
+                return AcknowledgedResponse.TRUE;
             } else {
                 // the composable template is not expected to be added, as its dependency is not available in the cluster state
                 // custom-plugin-settings.json is not expected to be added as it contains a dependency on the default_pipeline
@@ -141,6 +154,9 @@ public class IndexTemplateRegistryTests extends ESTestCase {
             if (action instanceof PutComponentTemplateAction) {
                 assertPutComponentTemplate(calledTimes, action, request, listener);
                 return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                // ignore lifecycle policies in this case
+                return AcknowledgedResponse.TRUE;
             } else {
                 // the composable template is not expected to be added, as its dependency is not available in the cluster state
                 fail("client called with unexpected request: " + request.toString());
@@ -167,6 +183,9 @@ public class IndexTemplateRegistryTests extends ESTestCase {
             if (action instanceof PutPipelineAction) {
                 assertPutPipelineAction(calledTimes, action, request, listener, "custom-plugin-default_pipeline");
                 return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                // ignore lifecycle policies in this case
+                return AcknowledgedResponse.TRUE;
             } else {
                 // the template is not expected to be added, as the final pipeline is missing
                 fail("client called with unexpected request: " + request.toString());
@@ -193,6 +212,9 @@ public class IndexTemplateRegistryTests extends ESTestCase {
             if (action instanceof PutComposableIndexTemplateAction) {
                 assertPutComposableIndexTemplateAction(calledTimes, action, request, listener);
                 return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                // ignore lifecycle policies in this case
+                return AcknowledgedResponse.TRUE;
             } else if (action instanceof PutPipelineAction) {
                 // ignore pipelines in this case
                 return AcknowledgedResponse.TRUE;
@@ -224,6 +246,9 @@ public class IndexTemplateRegistryTests extends ESTestCase {
                     "custom-plugin-final_pipeline"
                 );
                 return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                // ignore lifecycle policies in this case
+                return AcknowledgedResponse.TRUE;
             } else if (action instanceof PutComponentTemplateAction) {
                 assertPutComponentTemplate(calledTimes, action, request, listener);
                 return AcknowledgedResponse.TRUE;
@@ -255,6 +280,9 @@ public class IndexTemplateRegistryTests extends ESTestCase {
             if (action instanceof PutComposableIndexTemplateAction) {
                 // ignore this
                 return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                // ignore lifecycle policies in this case
+                return AcknowledgedResponse.TRUE;
             } else {
                 fail("client called with unexpected request: " + request.toString());
                 return null;
@@ -271,6 +299,173 @@ public class IndexTemplateRegistryTests extends ESTestCase {
         assertBusy(() -> assertThat(calledTimes.get(), equalTo(0)));
     }
 
+    public void testThatNonExistingPoliciesAreAddedImmediately() throws Exception {
+        DiscoveryNode node = DiscoveryNodeUtils.create("node");
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
+
+        AtomicInteger calledTimes = new AtomicInteger(0);
+        client.setVerifier((action, request, listener) -> {
+            if (action instanceof PutComposableIndexTemplateAction) {
+                // ignore this
+                return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                assertPutLifecycleAction(calledTimes, action, request, listener);
+                return AcknowledgedResponse.TRUE;
+            } else {
+                fail("client called with unexpected request: " + request.toString());
+                return null;
+            }
+        });
+
+        ClusterChangedEvent event = createClusterChangedEvent(
+            Map.of("custom-plugin-settings", 3),
+            Map.of(),
+            Map.of("custom-plugin-default_pipeline", 3, "custom-plugin-final_pipeline", 3),
+            nodes
+        );
+        registry.clusterChanged(event);
+        assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getPolicyConfigs().size())));
+    }
+
+    public void testPolicyAlreadyExists() {
+        DiscoveryNode node = DiscoveryNodeUtils.create("node");
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
+
+        Map<String, LifecyclePolicy> policyMap = new HashMap<>();
+        List<LifecyclePolicy> policies = registry.getPolicyConfigs();
+        assertThat(policies, hasSize(1));
+        policies.forEach(p -> policyMap.put(p.getName(), p));
+
+        client.setVerifier((action, request, listener) -> {
+            if (action instanceof PutComposableIndexTemplateAction) {
+                // ignore this
+                return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                fail("if the policy already exists it should not be re-put");
+            } else {
+                fail("client called with unexpected request: " + request.toString());
+            }
+            return null;
+        });
+
+        ClusterChangedEvent event = createClusterChangedEvent(
+            Map.of("custom-plugin-settings", 3),
+            policyMap,
+            Map.of("custom-plugin-default_pipeline", 3, "custom-plugin-final_pipeline", 3),
+            nodes
+        );
+
+        registry.clusterChanged(event);
+    }
+
+    public void testPolicyAlreadyExistsButDiffers() throws IOException {
+        DiscoveryNode node = DiscoveryNodeUtils.create("node");
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
+
+        Map<String, LifecyclePolicy> policyMap = new HashMap<>();
+        String policyStr = "{\"phases\":{\"delete\":{\"min_age\":\"1m\",\"actions\":{\"delete\":{}}}}}";
+        List<LifecyclePolicy> policies = registry.getPolicyConfigs();
+        assertThat(policies, hasSize(1));
+        policies.forEach(p -> policyMap.put(p.getName(), p));
+
+        client.setVerifier((action, request, listener) -> {
+            if (action instanceof PutComposableIndexTemplateAction) {
+                // ignore this
+                return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                fail("if the policy already exists it should not be re-put");
+            } else {
+                fail("client called with unexpected request: " + request.toString());
+            }
+            return null;
+        });
+
+        try (
+            XContentParser parser = XContentType.JSON.xContent()
+                .createParser(
+                    XContentParserConfiguration.EMPTY.withRegistry(
+                        new NamedXContentRegistry(
+                            List.of(
+                                new NamedXContentRegistry.Entry(
+                                    LifecycleAction.class,
+                                    new ParseField(DeleteAction.NAME),
+                                    DeleteAction::parse
+                                )
+                            )
+                        )
+                    ),
+                    policyStr
+                )
+        ) {
+            LifecyclePolicy different = LifecyclePolicy.parse(parser, policies.get(0).getName());
+            policyMap.put(policies.get(0).getName(), different);
+            ClusterChangedEvent event = createClusterChangedEvent(
+                Map.of("custom-plugin-settings", 3),
+                policyMap,
+                Map.of("custom-plugin-default_pipeline", 3, "custom-plugin-final_pipeline", 3),
+                nodes
+            );
+            registry.clusterChanged(event);
+        }
+    }
+
+    public void testPolicyUpgraded() throws Exception {
+        registry.setPolicyUpgradeRequired(true);
+        DiscoveryNode node = DiscoveryNodeUtils.create("node");
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
+
+        Map<String, LifecyclePolicy> policyMap = new HashMap<>();
+        String priorPolicyStr = "{\"phases\":{\"delete\":{\"min_age\":\"1m\",\"actions\":{\"delete\":{}}}}}";
+        List<LifecyclePolicy> policies = registry.getPolicyConfigs();
+        assertThat(policies, hasSize(1));
+        policies.forEach(p -> policyMap.put(p.getName(), p));
+
+        AtomicInteger calledTimes = new AtomicInteger(0);
+        client.setVerifier((action, request, listener) -> {
+            if (action instanceof PutComposableIndexTemplateAction) {
+                // ignore this
+                return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                assertPutLifecycleAction(calledTimes, action, request, listener);
+                return AcknowledgedResponse.TRUE;
+
+            } else {
+                fail("client called with unexpected request: " + request.toString());
+            }
+            return null;
+        });
+
+        try (
+            XContentParser parser = XContentType.JSON.xContent()
+                .createParser(
+                    XContentParserConfiguration.EMPTY.withRegistry(
+                        new NamedXContentRegistry(
+                            List.of(
+                                new NamedXContentRegistry.Entry(
+                                    LifecycleAction.class,
+                                    new ParseField(DeleteAction.NAME),
+                                    DeleteAction::parse
+                                )
+                            )
+                        )
+                    ),
+                    priorPolicyStr
+                )
+        ) {
+            LifecyclePolicy priorPolicy = LifecyclePolicy.parse(parser, policies.get(0).getName());
+            policyMap.put(policies.get(0).getName(), priorPolicy);
+            ClusterChangedEvent event = createClusterChangedEvent(
+                Map.of("custom-plugin-settings", 3),
+                policyMap,
+                Map.of("custom-plugin-default_pipeline", 3, "custom-plugin-final_pipeline", 3),
+                nodes
+            );
+            registry.clusterChanged(event);
+            // we've changed one policy that should be upgraded
+            assertBusy(() -> assertThat(calledTimes.get(), equalTo(1)));
+        }
+    }
+
     private static void assertPutComponentTemplate(
         AtomicInteger calledTimes,
         ActionType<?> action,
@@ -332,6 +527,20 @@ public class IndexTemplateRegistryTests extends ESTestCase {
         calledTimes.incrementAndGet();
     }
 
+    private static void assertPutLifecycleAction(
+        AtomicInteger calledTimes,
+        ActionType<?> action,
+        ActionRequest request,
+        ActionListener<?> listener
+    ) {
+        assertThat(action, instanceOf(PutLifecycleAction.class));
+        assertThat(request, instanceOf(PutLifecycleAction.Request.class));
+        final PutLifecycleAction.Request putRequest = (PutLifecycleAction.Request) request;
+        assertThat(putRequest.getPolicy().getName(), equalTo("custom-plugin-policy"));
+        assertNotNull(listener);
+        calledTimes.incrementAndGet();
+    }
+
     private ClusterChangedEvent createClusterChangedEvent(Map<String, Integer> existingTemplates, DiscoveryNodes nodes) {
         return createClusterChangedEvent(existingTemplates, Collections.emptyMap(), Collections.emptyMap(), nodes);
     }

+ 21 - 0
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TestRegistryWithCustomPlugin.java

@@ -16,6 +16,7 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.XContentParserConfiguration;
 import org.elasticsearch.xcontent.json.JsonXContent;
+import org.elasticsearch.xpack.core.ilm.LifecyclePolicy;
 
 import java.io.IOException;
 import java.util.Collections;
@@ -27,6 +28,8 @@ class TestRegistryWithCustomPlugin extends IndexTemplateRegistry {
     public static final int REGISTRY_VERSION = 3;
     public static final String TEMPLATE_VERSION_VARIABLE = "xpack.custom_plugin.template.version";
 
+    private boolean policyUpgradeRequired = false;
+
     TestRegistryWithCustomPlugin(
         Settings nodeSettings,
         ClusterService clusterService,
@@ -88,6 +91,24 @@ class TestRegistryWithCustomPlugin extends IndexTemplateRegistry {
         );
     }
 
+    @Override
+    protected List<LifecyclePolicy> getPolicyConfigs() {
+        return List.of(
+            new LifecyclePolicyConfig("custom-plugin-policy", "/org/elasticsearch/xpack/core/template/custom-plugin-policy.json").load(
+                LifecyclePolicyConfig.DEFAULT_X_CONTENT_REGISTRY
+            )
+        );
+    }
+
+    @Override
+    protected boolean isUpgradeRequired(LifecyclePolicy currentPolicy, LifecyclePolicy newPolicy) {
+        return policyUpgradeRequired;
+    }
+
+    public void setPolicyUpgradeRequired(boolean policyUpgradeRequired) {
+        this.policyUpgradeRequired = policyUpgradeRequired;
+    }
+
     @Override
     protected String getOrigin() {
         return "test";

+ 22 - 0
x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/core/template/custom-plugin-policy.json

@@ -0,0 +1,22 @@
+{
+  "phases": {
+    "hot": {
+      "actions": {
+        "rollover": {
+          "max_primary_shard_size": "50gb",
+          "max_age": "7d"
+        }
+      }
+    },
+    "delete": {
+      "min_age": "30d",
+      "actions": {
+        "delete": {}
+      }
+    }
+  },
+  "_meta": {
+    "description": "default policy for application logs",
+    "managed": true
+  }
+}

+ 4 - 4
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java

@@ -145,10 +145,10 @@ public class EnterpriseSearch extends Plugin implements ActionPlugin, SystemInde
             new RestPutSearchApplicationAction(getLicenseState()),
             new RestDeleteSearchApplicationAction(getLicenseState()),
             new RestQuerySearchApplicationAction(getLicenseState()),
-            new RestPutAnalyticsCollectionAction(),
-            new RestGetAnalyticsCollectionAction(),
-            new RestDeleteAnalyticsCollectionAction(),
-            new RestPostAnalyticsEventAction(),
+            new RestPutAnalyticsCollectionAction(getLicenseState()),
+            new RestGetAnalyticsCollectionAction(getLicenseState()),
+            new RestDeleteAnalyticsCollectionAction(getLicenseState()),
+            new RestPostAnalyticsEventAction(getLicenseState()),
             new RestRenderSearchApplicationQueryAction(getLicenseState())
         );
     }

+ 3 - 3
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/SearchApplicationRestHandler.java → x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java

@@ -5,7 +5,7 @@
  * 2.0.
  */
 
-package org.elasticsearch.xpack.application.search.action;
+package org.elasticsearch.xpack.application;
 
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.license.XPackLicenseState;
@@ -16,10 +16,10 @@ import org.elasticsearch.xpack.application.utils.LicenseUtils;
 
 import java.io.IOException;
 
-public abstract class SearchApplicationRestHandler extends BaseRestHandler {
+public abstract class EnterpriseSearchBaseRestHandler extends BaseRestHandler {
     protected final XPackLicenseState licenseState;
 
-    protected SearchApplicationRestHandler(XPackLicenseState licenseState) {
+    protected EnterpriseSearchBaseRestHandler(XPackLicenseState licenseState) {
         this.licenseState = licenseState;
     }
 

+ 7 - 3
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionAction.java

@@ -8,12 +8,13 @@
 package org.elasticsearch.xpack.application.analytics.action;
 
 import org.elasticsearch.client.internal.node.NodeClient;
-import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestToXContentListener;
 import org.elasticsearch.xpack.application.EnterpriseSearch;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.io.IOException;
 import java.util.List;
@@ -21,7 +22,10 @@ import java.util.List;
 import static org.elasticsearch.rest.RestRequest.Method.DELETE;
 
 @ServerlessScope(Scope.PUBLIC)
-public class RestDeleteAnalyticsCollectionAction extends BaseRestHandler {
+public class RestDeleteAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler {
+    public RestDeleteAnalyticsCollectionAction(XPackLicenseState licenseState) {
+        super(licenseState);
+    }
 
     @Override
     public String getName() {
@@ -34,7 +38,7 @@ public class RestDeleteAnalyticsCollectionAction extends BaseRestHandler {
     }
 
     @Override
-    protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
+    protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         DeleteAnalyticsCollectionAction.Request request = new DeleteAnalyticsCollectionAction.Request(restRequest.param("collection_name"));
         return channel -> client.execute(DeleteAnalyticsCollectionAction.INSTANCE, request, new RestToXContentListener<>(channel));
     }

+ 7 - 3
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionAction.java

@@ -9,19 +9,23 @@ package org.elasticsearch.xpack.application.analytics.action;
 
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestToXContentListener;
 import org.elasticsearch.xpack.application.EnterpriseSearch;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.util.List;
 
 import static org.elasticsearch.rest.RestRequest.Method.GET;
 
 @ServerlessScope(Scope.PUBLIC)
-public class RestGetAnalyticsCollectionAction extends BaseRestHandler {
+public class RestGetAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler {
+    public RestGetAnalyticsCollectionAction(XPackLicenseState licenseState) {
+        super(licenseState);
+    }
 
     @Override
     public String getName() {
@@ -37,7 +41,7 @@ public class RestGetAnalyticsCollectionAction extends BaseRestHandler {
     }
 
     @Override
-    protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {
+    protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) {
         GetAnalyticsCollectionAction.Request request = new GetAnalyticsCollectionAction.Request(
             Strings.splitStringByCommaToArray(restRequest.param("collection_name"))
         );

+ 7 - 3
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java

@@ -11,13 +11,14 @@ import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.network.InetAddresses;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestStatusToXContentListener;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.application.EnterpriseSearch;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.net.InetAddress;
 import java.util.List;
@@ -26,7 +27,10 @@ import java.util.Map;
 import static org.elasticsearch.rest.RestRequest.Method.POST;
 
 @ServerlessScope(Scope.PUBLIC)
-public class RestPostAnalyticsEventAction extends BaseRestHandler {
+public class RestPostAnalyticsEventAction extends EnterpriseSearchBaseRestHandler {
+    public RestPostAnalyticsEventAction(XPackLicenseState licenseState) {
+        super(licenseState);
+    }
 
     public static final String X_FORWARDED_FOR_HEADER = "X-Forwarded-For";
 
@@ -41,7 +45,7 @@ public class RestPostAnalyticsEventAction extends BaseRestHandler {
     }
 
     @Override
-    protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {
+    protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) {
         PostAnalyticsEventAction.Request request = buidRequest(restRequest);
         return channel -> client.execute(PostAnalyticsEventAction.INSTANCE, request, new RestStatusToXContentListener<>(channel));
     }

+ 7 - 3
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java

@@ -8,20 +8,24 @@
 package org.elasticsearch.xpack.application.analytics.action;
 
 import org.elasticsearch.client.internal.node.NodeClient;
-import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.RestHandler;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestStatusToXContentListener;
 import org.elasticsearch.xpack.application.EnterpriseSearch;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.util.List;
 
 import static org.elasticsearch.rest.RestRequest.Method.PUT;
 
 @ServerlessScope(Scope.PUBLIC)
-public class RestPutAnalyticsCollectionAction extends BaseRestHandler {
+public class RestPutAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler {
+    public RestPutAnalyticsCollectionAction(XPackLicenseState licenseState) {
+        super(licenseState);
+    }
 
     @Override
     public String getName() {
@@ -34,7 +38,7 @@ public class RestPutAnalyticsCollectionAction extends BaseRestHandler {
     }
 
     @Override
-    protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {
+    protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) {
         PutAnalyticsCollectionAction.Request request = new PutAnalyticsCollectionAction.Request(restRequest.param("collection_name"));
         String location = routes().get(0).getPath().replace("{collection_name}", request.getName());
         return channel -> client.execute(

+ 2 - 12
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionAction.java

@@ -17,20 +17,16 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService;
-import org.elasticsearch.xpack.application.utils.LicenseUtils;
 
 public class TransportDeleteAnalyticsCollectionAction extends AcknowledgedTransportMasterNodeAction<
     DeleteAnalyticsCollectionAction.Request> {
 
     private final AnalyticsCollectionService analyticsCollectionService;
 
-    private final XPackLicenseState licenseState;
-
     @Inject
     public TransportDeleteAnalyticsCollectionAction(
         TransportService transportService,
@@ -38,8 +34,7 @@ public class TransportDeleteAnalyticsCollectionAction extends AcknowledgedTransp
         ThreadPool threadPool,
         ActionFilters actionFilters,
         IndexNameExpressionResolver indexNameExpressionResolver,
-        AnalyticsCollectionService analyticsCollectionService,
-        XPackLicenseState licenseState
+        AnalyticsCollectionService analyticsCollectionService
     ) {
         super(
             DeleteAnalyticsCollectionAction.NAME,
@@ -52,7 +47,6 @@ public class TransportDeleteAnalyticsCollectionAction extends AcknowledgedTransp
             ThreadPool.Names.SAME
         );
         this.analyticsCollectionService = analyticsCollectionService;
-        this.licenseState = licenseState;
     }
 
     @Override
@@ -67,10 +61,6 @@ public class TransportDeleteAnalyticsCollectionAction extends AcknowledgedTransp
         ClusterState state,
         ActionListener<AcknowledgedResponse> listener
     ) {
-        LicenseUtils.runIfSupportedLicense(
-            licenseState,
-            () -> analyticsCollectionService.deleteAnalyticsCollection(state, request, listener),
-            listener::onFailure
-        );
+        analyticsCollectionService.deleteAnalyticsCollection(state, request, listener.map(v -> AcknowledgedResponse.TRUE));
     }
 }

+ 2 - 12
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionAction.java

@@ -15,12 +15,10 @@ import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService;
-import org.elasticsearch.xpack.application.utils.LicenseUtils;
 
 public class TransportGetAnalyticsCollectionAction extends TransportMasterNodeReadAction<
     GetAnalyticsCollectionAction.Request,
@@ -28,8 +26,6 @@ public class TransportGetAnalyticsCollectionAction extends TransportMasterNodeRe
 
     private final AnalyticsCollectionService analyticsCollectionService;
 
-    private final XPackLicenseState licenseState;
-
     @Inject
     public TransportGetAnalyticsCollectionAction(
         TransportService transportService,
@@ -37,8 +33,7 @@ public class TransportGetAnalyticsCollectionAction extends TransportMasterNodeRe
         ThreadPool threadPool,
         ActionFilters actionFilters,
         IndexNameExpressionResolver indexNameExpressionResolver,
-        AnalyticsCollectionService analyticsCollectionService,
-        XPackLicenseState licenseState
+        AnalyticsCollectionService analyticsCollectionService
     ) {
         super(
             GetAnalyticsCollectionAction.NAME,
@@ -52,7 +47,6 @@ public class TransportGetAnalyticsCollectionAction extends TransportMasterNodeRe
             ThreadPool.Names.SAME
         );
         this.analyticsCollectionService = analyticsCollectionService;
-        this.licenseState = licenseState;
     }
 
     @Override
@@ -62,11 +56,7 @@ public class TransportGetAnalyticsCollectionAction extends TransportMasterNodeRe
         ClusterState state,
         ActionListener<GetAnalyticsCollectionAction.Response> listener
     ) {
-        LicenseUtils.runIfSupportedLicense(
-            licenseState,
-            () -> analyticsCollectionService.getAnalyticsCollection(state, request, listener),
-            listener::onFailure
-        );
+        analyticsCollectionService.getAnalyticsCollection(state, request, listener);
     }
 
     @Override

+ 2 - 22
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventAction.java

@@ -11,11 +11,9 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.application.analytics.AnalyticsEventIngestService;
-import org.elasticsearch.xpack.application.utils.LicenseUtils;
 
 /**
  * Transport implementation for the {@link PostAnalyticsEventAction}.
@@ -28,40 +26,22 @@ public class TransportPostAnalyticsEventAction extends HandledTransportAction<
 
     private final AnalyticsEventIngestService eventEmitterService;
 
-    private final XPackLicenseState xPackLicenseState;
-
     @Inject
     public TransportPostAnalyticsEventAction(
         TransportService transportService,
         ActionFilters actionFilters,
-        AnalyticsEventIngestService eventEmitterService,
-        XPackLicenseState xPackLicenseState
+        AnalyticsEventIngestService eventEmitterService
     ) {
         super(PostAnalyticsEventAction.NAME, transportService, actionFilters, PostAnalyticsEventAction.Request::new);
         this.eventEmitterService = eventEmitterService;
-        this.xPackLicenseState = xPackLicenseState;
     }
 
-    /**
-     * Executes the actual handling of the action. It calls the {@link LicenseUtils#runIfSupportedLicense} method with
-     * the XPack license state to check if the license is valid. If the license is valid, it calls the
-     * {@link AnalyticsEventIngestService#addEvent} method with the request and listener. Else, it calls the listener's onFailure method
-     * with the appropriate exception.
-     *
-     * @param task The {@link Task} associated with the action.
-     * @param request The {@link PostAnalyticsEventAction.Request} object containing the request parameters.
-     * @param listener The {@link ActionListener} to be called with the response or failure.
-     */
     @Override
     protected void doExecute(
         Task task,
         PostAnalyticsEventAction.Request request,
         ActionListener<PostAnalyticsEventAction.Response> listener
     ) {
-        LicenseUtils.runIfSupportedLicense(
-            xPackLicenseState,
-            () -> this.eventEmitterService.addEvent(request, listener),
-            listener::onFailure
-        );
+        this.eventEmitterService.addEvent(request, listener);
     }
 }

+ 2 - 12
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionAction.java

@@ -16,12 +16,10 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService;
-import org.elasticsearch.xpack.application.utils.LicenseUtils;
 
 public class TransportPutAnalyticsCollectionAction extends TransportMasterNodeAction<
     PutAnalyticsCollectionAction.Request,
@@ -29,8 +27,6 @@ public class TransportPutAnalyticsCollectionAction extends TransportMasterNodeAc
 
     private final AnalyticsCollectionService analyticsCollectionService;
 
-    private final XPackLicenseState licenseState;
-
     @Inject
     public TransportPutAnalyticsCollectionAction(
         TransportService transportService,
@@ -38,8 +34,7 @@ public class TransportPutAnalyticsCollectionAction extends TransportMasterNodeAc
         ThreadPool threadPool,
         ActionFilters actionFilters,
         IndexNameExpressionResolver indexNameExpressionResolver,
-        AnalyticsCollectionService analyticsCollectionService,
-        XPackLicenseState licenseState
+        AnalyticsCollectionService analyticsCollectionService
     ) {
         super(
             PutAnalyticsCollectionAction.NAME,
@@ -53,7 +48,6 @@ public class TransportPutAnalyticsCollectionAction extends TransportMasterNodeAc
             ThreadPool.Names.SAME
         );
         this.analyticsCollectionService = analyticsCollectionService;
-        this.licenseState = licenseState;
     }
 
     @Override
@@ -68,11 +62,7 @@ public class TransportPutAnalyticsCollectionAction extends TransportMasterNodeAc
         ClusterState state,
         ActionListener<PutAnalyticsCollectionAction.Response> listener
     ) {
-        LicenseUtils.runIfSupportedLicense(
-            licenseState,
-            () -> analyticsCollectionService.putAnalyticsCollection(state, request, listener),
-            listener::onFailure
-        );
+        analyticsCollectionService.putAnalyticsCollection(state, request, listener);
     }
 
 }

+ 2 - 1
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestDeleteSearchApplicationAction.java

@@ -14,13 +14,14 @@ import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestToXContentListener;
 import org.elasticsearch.xpack.application.EnterpriseSearch;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.util.List;
 
 import static org.elasticsearch.rest.RestRequest.Method.DELETE;
 
 @ServerlessScope(Scope.PUBLIC)
-public class RestDeleteSearchApplicationAction extends SearchApplicationRestHandler {
+public class RestDeleteSearchApplicationAction extends EnterpriseSearchBaseRestHandler {
     public RestDeleteSearchApplicationAction(XPackLicenseState licenseState) {
         super(licenseState);
     }

+ 2 - 1
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestGetSearchApplicationAction.java

@@ -14,13 +14,14 @@ import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestToXContentListener;
 import org.elasticsearch.xpack.application.EnterpriseSearch;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.util.List;
 
 import static org.elasticsearch.rest.RestRequest.Method.GET;
 
 @ServerlessScope(Scope.PUBLIC)
-public class RestGetSearchApplicationAction extends SearchApplicationRestHandler {
+public class RestGetSearchApplicationAction extends EnterpriseSearchBaseRestHandler {
     public RestGetSearchApplicationAction(XPackLicenseState licenseState) {
         super(licenseState);
     }

+ 2 - 1
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestListSearchApplicationAction.java

@@ -14,6 +14,7 @@ import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestToXContentListener;
 import org.elasticsearch.xpack.application.EnterpriseSearch;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 import org.elasticsearch.xpack.core.action.util.PageParams;
 
 import java.util.List;
@@ -21,7 +22,7 @@ import java.util.List;
 import static org.elasticsearch.rest.RestRequest.Method.GET;
 
 @ServerlessScope(Scope.PUBLIC)
-public class RestListSearchApplicationAction extends SearchApplicationRestHandler {
+public class RestListSearchApplicationAction extends EnterpriseSearchBaseRestHandler {
     public RestListSearchApplicationAction(XPackLicenseState licenseState) {
         super(licenseState);
     }

+ 2 - 1
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestPutSearchApplicationAction.java

@@ -15,6 +15,7 @@ import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestToXContentListener;
 import org.elasticsearch.xpack.application.EnterpriseSearch;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.io.IOException;
 import java.util.List;
@@ -22,7 +23,7 @@ import java.util.List;
 import static org.elasticsearch.rest.RestRequest.Method.PUT;
 
 @ServerlessScope(Scope.PUBLIC)
-public class RestPutSearchApplicationAction extends SearchApplicationRestHandler {
+public class RestPutSearchApplicationAction extends EnterpriseSearchBaseRestHandler {
     public RestPutSearchApplicationAction(XPackLicenseState licenseState) {
         super(licenseState);
     }

+ 2 - 1
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestQuerySearchApplicationAction.java

@@ -15,6 +15,7 @@ import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestCancellableNodeClient;
 import org.elasticsearch.rest.action.RestChunkedToXContentListener;
 import org.elasticsearch.xpack.application.EnterpriseSearch;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.io.IOException;
 import java.util.List;
@@ -23,7 +24,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET;
 import static org.elasticsearch.rest.RestRequest.Method.POST;
 
 @ServerlessScope(Scope.PUBLIC)
-public class RestQuerySearchApplicationAction extends SearchApplicationRestHandler {
+public class RestQuerySearchApplicationAction extends EnterpriseSearchBaseRestHandler {
     public RestQuerySearchApplicationAction(XPackLicenseState licenseState) {
         super(licenseState);
     }

+ 2 - 1
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestRenderSearchApplicationQueryAction.java

@@ -12,13 +12,14 @@ import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.RestToXContentListener;
 import org.elasticsearch.xpack.application.EnterpriseSearch;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.io.IOException;
 import java.util.List;
 
 import static org.elasticsearch.rest.RestRequest.Method.POST;
 
-public class RestRenderSearchApplicationQueryAction extends SearchApplicationRestHandler {
+public class RestRenderSearchApplicationQueryAction extends EnterpriseSearchBaseRestHandler {
     public RestRenderSearchApplicationQueryAction(XPackLicenseState licenseState) {
         super(licenseState);
     }

+ 0 - 9
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/utils/LicenseUtils.java

@@ -14,8 +14,6 @@ import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xpack.core.XPackField;
 
-import java.util.function.Consumer;
-
 public final class LicenseUtils {
     public static final LicensedFeature.Momentary LICENSED_ENT_SEARCH_FEATURE = LicensedFeature.momentary(
         null,
@@ -39,11 +37,4 @@ public final class LicenseUtils {
         return e;
     }
 
-    public static void runIfSupportedLicense(XPackLicenseState licenseState, Runnable onSuccess, Consumer<Exception> onFailure) {
-        if (supportedLicense(licenseState)) {
-            onSuccess.run();
-        } else {
-            onFailure.accept(newComplianceException(licenseState));
-        }
-    }
 }

+ 39 - 0
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/AbstractRestEnterpriseSearchActionTests.java

@@ -0,0 +1,39 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.application;
+
+import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.client.NoOpNodeClient;
+import org.elasticsearch.test.rest.FakeRestChannel;
+import org.elasticsearch.test.rest.FakeRestRequest;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.mockito.Mockito.mock;
+
+public abstract class AbstractRestEnterpriseSearchActionTests extends ESTestCase {
+    protected void checkLicenseForRequest(FakeRestRequest request) throws Exception {
+        final XPackLicenseState licenseState = mock(XPackLicenseState.class);
+        final EnterpriseSearchBaseRestHandler action = getRestAction(licenseState);
+
+        final FakeRestChannel channel = new FakeRestChannel(request, true, 1);
+
+        try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName())) {
+            action.handleRequest(request, channel, nodeClient);
+        }
+        assertThat(channel.capturedResponse(), notNullValue());
+        assertThat(channel.capturedResponse().status(), equalTo(RestStatus.FORBIDDEN));
+        assertThat(channel.capturedResponse().content().utf8ToString(), containsString("Current license is non-compliant"));
+    }
+
+    protected abstract EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState);
+}

+ 3 - 3
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/SearchApplicationRestHandlerTests.java → x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandlerTests.java

@@ -5,7 +5,7 @@
  * 2.0.
  */
 
-package org.elasticsearch.xpack.application.search.action;
+package org.elasticsearch.xpack.application;
 
 import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.license.MockLicenseState;
@@ -24,7 +24,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
 import static org.mockito.Mockito.when;
 
-public class SearchApplicationRestHandlerTests extends ESTestCase {
+public class EnterpriseSearchBaseRestHandlerTests extends ESTestCase {
     public void testLicenseEnforcement() throws Exception {
         MockLicenseState licenseState = MockLicenseState.createMock();
         final boolean licensedFeature = randomBoolean();
@@ -33,7 +33,7 @@ public class SearchApplicationRestHandlerTests extends ESTestCase {
         when(licenseState.isActive()).thenReturn(licensedFeature);
 
         final AtomicBoolean consumerCalled = new AtomicBoolean(false);
-        SearchApplicationRestHandler handler = new SearchApplicationRestHandler(licenseState) {
+        EnterpriseSearchBaseRestHandler handler = new EnterpriseSearchBaseRestHandler(licenseState) {
 
             @Override
             protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException {

+ 24 - 0
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionActionTests.java

@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.application.analytics.action;
+
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
+
+public class RestDeleteAnalyticsCollectionActionTests extends AbstractRestEnterpriseSearchActionTests {
+    public void testWithNonCompliantLicense() throws Exception {
+        checkLicenseForRequest(new FakeRestRequest());
+    }
+
+    @Override
+    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
+        return new RestDeleteAnalyticsCollectionAction(licenseState);
+    }
+}

+ 24 - 0
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionActionTests.java

@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.application.analytics.action;
+
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
+
+public class RestGetAnalyticsCollectionActionTests extends AbstractRestEnterpriseSearchActionTests {
+    public void testWithNonCompliantLicense() throws Exception {
+        checkLicenseForRequest(new FakeRestRequest());
+    }
+
+    @Override
+    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
+        return new RestGetAnalyticsCollectionAction(licenseState);
+    }
+}

+ 37 - 0
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventActionTests.java

@@ -0,0 +1,37 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.application.analytics.action;
+
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.xcontent.NamedXContentRegistry;
+import org.elasticsearch.xcontent.XContentType;
+import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
+
+import java.net.InetSocketAddress;
+import java.util.Map;
+
+public class RestPostAnalyticsEventActionTests extends AbstractRestEnterpriseSearchActionTests {
+    public void testWithNonCompliantLicense() throws Exception {
+        checkLicenseForRequest(
+            new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.POST)
+                .withParams(Map.of("collection_name", "my-collection"))
+                .withContent(new BytesArray("{}"), XContentType.JSON)
+                .withRemoteAddress(new InetSocketAddress(randomIp(randomBoolean()), randomIntBetween(1, 65535)))
+                .build()
+        );
+    }
+
+    @Override
+    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
+        return new RestPostAnalyticsEventAction(licenseState);
+    }
+}

+ 32 - 0
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionActionTests.java

@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.application.analytics.action;
+
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.xcontent.NamedXContentRegistry;
+import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
+
+import java.util.Map;
+
+public class RestPutAnalyticsCollectionActionTests extends AbstractRestEnterpriseSearchActionTests {
+    public void testWithNonCompliantLicense() throws Exception {
+        checkLicenseForRequest(
+            new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.PUT)
+                .withParams(Map.of("collection_name", "my-collection"))
+                .build()
+        );
+    }
+
+    @Override
+    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
+        return new RestPutAnalyticsCollectionAction(licenseState);
+    }
+}

+ 0 - 91
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionActionTests.java

@@ -1,91 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.xpack.application.analytics.action;
-
-import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.master.AcknowledgedResponse;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.tasks.Task;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService;
-
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.mockLicenseState;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyExceptionIsThrownOnInvalidLicence;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyNoExceptionIsThrown;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyNoResponseIsSent;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-
-public class TransportDeleteAnalyticsCollectionActionTests extends ESTestCase {
-
-    public void testWithSupportedLicense() {
-        AnalyticsCollectionService analyticsCollectionService = mock(AnalyticsCollectionService.class);
-
-        TransportDeleteAnalyticsCollectionAction transportAction = createTransportAction(
-            mockLicenseState(true),
-            analyticsCollectionService
-        );
-        DeleteAnalyticsCollectionAction.Request request = mock(DeleteAnalyticsCollectionAction.Request.class);
-
-        ClusterState clusterState = mock(ClusterState.class);
-
-        @SuppressWarnings("unchecked")
-        ActionListener<AcknowledgedResponse> listener = mock(ActionListener.class);
-
-        transportAction.masterOperation(mock(Task.class), request, clusterState, listener);
-
-        verify(analyticsCollectionService, times(1)).deleteAnalyticsCollection(clusterState, request, listener);
-        verifyNoExceptionIsThrown(listener);
-    }
-
-    public void testWithUnsupportedLicense() {
-        AnalyticsCollectionService analyticsCollectionService = mock(AnalyticsCollectionService.class);
-
-        TransportDeleteAnalyticsCollectionAction transportAction = createTransportAction(
-            mockLicenseState(false),
-            analyticsCollectionService
-        );
-        DeleteAnalyticsCollectionAction.Request request = mock(DeleteAnalyticsCollectionAction.Request.class);
-
-        ClusterState clusterState = mock(ClusterState.class);
-
-        @SuppressWarnings("unchecked")
-        ActionListener<AcknowledgedResponse> listener = mock(ActionListener.class);
-
-        transportAction.masterOperation(mock(Task.class), request, clusterState, listener);
-
-        verify(analyticsCollectionService, never()).putAnalyticsCollection(any(), any(), any());
-
-        verifyNoResponseIsSent(listener);
-        verifyExceptionIsThrownOnInvalidLicence(listener);
-    }
-
-    private TransportDeleteAnalyticsCollectionAction createTransportAction(
-        XPackLicenseState licenseState,
-        AnalyticsCollectionService analyticsCollectionService
-    ) {
-        return new TransportDeleteAnalyticsCollectionAction(
-            mock(TransportService.class),
-            mock(ClusterService.class),
-            mock(ThreadPool.class),
-            mock(ActionFilters.class),
-            mock(IndexNameExpressionResolver.class),
-            analyticsCollectionService,
-            licenseState
-        );
-    }
-}

+ 0 - 83
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionActionTests.java

@@ -1,83 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.xpack.application.analytics.action;
-
-import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.tasks.Task;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService;
-
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.mockLicenseState;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyExceptionIsThrownOnInvalidLicence;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyNoExceptionIsThrown;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyNoResponseIsSent;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-
-public class TransportGetAnalyticsCollectionActionTests extends ESTestCase {
-
-    public void testWithSupportedLicense() {
-        AnalyticsCollectionService analyticsCollectionService = mock(AnalyticsCollectionService.class);
-
-        TransportGetAnalyticsCollectionAction transportAction = createTransportAction(mockLicenseState(true), analyticsCollectionService);
-        GetAnalyticsCollectionAction.Request request = mock(GetAnalyticsCollectionAction.Request.class);
-
-        ClusterState clusterState = mock(ClusterState.class);
-
-        @SuppressWarnings("unchecked")
-        ActionListener<GetAnalyticsCollectionAction.Response> listener = mock(ActionListener.class);
-
-        transportAction.masterOperation(mock(Task.class), request, clusterState, listener);
-
-        verify(analyticsCollectionService, times(1)).getAnalyticsCollection(clusterState, request, listener);
-        verifyNoExceptionIsThrown(listener);
-    }
-
-    public void testWithUnsupportedLicense() {
-        AnalyticsCollectionService analyticsCollectionService = mock(AnalyticsCollectionService.class);
-
-        TransportGetAnalyticsCollectionAction transportAction = createTransportAction(mockLicenseState(false), analyticsCollectionService);
-        GetAnalyticsCollectionAction.Request request = mock(GetAnalyticsCollectionAction.Request.class);
-
-        ClusterState clusterState = mock(ClusterState.class);
-
-        @SuppressWarnings("unchecked")
-        ActionListener<GetAnalyticsCollectionAction.Response> listener = mock(ActionListener.class);
-
-        transportAction.masterOperation(mock(Task.class), request, clusterState, listener);
-
-        verifyExceptionIsThrownOnInvalidLicence(listener);
-        verifyNoResponseIsSent(listener);
-        verify(analyticsCollectionService, never()).getAnalyticsCollection(any(), any(), any());
-    }
-
-    private TransportGetAnalyticsCollectionAction createTransportAction(
-        XPackLicenseState licenseState,
-        AnalyticsCollectionService analyticsCollectionService
-    ) {
-        return new TransportGetAnalyticsCollectionAction(
-            mock(TransportService.class),
-            mock(ClusterService.class),
-            mock(ThreadPool.class),
-            mock(ActionFilters.class),
-            mock(IndexNameExpressionResolver.class),
-            analyticsCollectionService,
-            licenseState
-        );
-    }
-}

+ 0 - 68
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventActionTests.java

@@ -1,68 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.xpack.application.analytics.action;
-
-import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.tasks.Task;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.xpack.application.analytics.AnalyticsEventIngestService;
-
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.mockLicenseState;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyExceptionIsThrownOnInvalidLicence;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyNoExceptionIsThrown;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-
-public class TransportPostAnalyticsEventActionTests extends ESTestCase {
-
-    public void testWithSupportedLicense() {
-        AnalyticsEventIngestService eventEmitter = mock(AnalyticsEventIngestService.class);
-
-        TransportPostAnalyticsEventAction transportAction = createTransportAction(mockLicenseState(true), eventEmitter);
-        PostAnalyticsEventAction.Request request = mock(PostAnalyticsEventAction.Request.class);
-
-        @SuppressWarnings("unchecked")
-        ActionListener<PostAnalyticsEventAction.Response> listener = mock(ActionListener.class);
-
-        transportAction.doExecute(mock(Task.class), request, listener);
-        verify(eventEmitter, times(1)).addEvent(request, listener);
-        verifyNoExceptionIsThrown(listener);
-    }
-
-    public void testWithUnsupportedLicense() {
-        AnalyticsEventIngestService eventEmitter = mock(AnalyticsEventIngestService.class);
-
-        TransportPostAnalyticsEventAction transportAction = createTransportAction(mockLicenseState(false), eventEmitter);
-        PostAnalyticsEventAction.Request request = mock(PostAnalyticsEventAction.Request.class);
-
-        @SuppressWarnings("unchecked")
-        ActionListener<PostAnalyticsEventAction.Response> listener = mock(ActionListener.class);
-
-        transportAction.doExecute(mock(Task.class), request, listener);
-
-        verify(eventEmitter, never()).addEvent(request, listener);
-        verifyExceptionIsThrownOnInvalidLicence(listener);
-    }
-
-    private TransportPostAnalyticsEventAction createTransportAction(
-        XPackLicenseState xPackLicenseState,
-        AnalyticsEventIngestService eventEmitter
-    ) {
-        return new TransportPostAnalyticsEventAction(
-            mock(TransportService.class),
-            mock(ActionFilters.class),
-            eventEmitter,
-            xPackLicenseState
-        );
-    }
-}

+ 0 - 87
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionActionTests.java

@@ -1,87 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.xpack.application.analytics.action;
-
-import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.tasks.Task;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService;
-
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.mockLicenseState;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyExceptionIsThrownOnInvalidLicence;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyNoExceptionIsThrown;
-import static org.elasticsearch.xpack.application.analytics.action.AnalyticsTransportActionTestUtils.verifyNoResponseIsSent;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-
-public class TransportPutAnalyticsCollectionActionTests extends ESTestCase {
-
-    public void testWithSupportedLicense() {
-        AnalyticsCollectionService analyticsCollectionService = mock(AnalyticsCollectionService.class);
-
-        TransportPutAnalyticsCollectionAction transportAction = createTransportAction(mockLicenseState(true), analyticsCollectionService);
-        PutAnalyticsCollectionAction.Request request = mock(PutAnalyticsCollectionAction.Request.class);
-
-        ClusterState clusterState = mock(ClusterState.class);
-
-        @SuppressWarnings("unchecked")
-        ActionListener<PutAnalyticsCollectionAction.Response> listener = mock(ActionListener.class);
-
-        transportAction.masterOperation(mock(Task.class), request, clusterState, listener);
-
-        verify(analyticsCollectionService, times(1)).putAnalyticsCollection(clusterState, request, listener);
-        verifyNoExceptionIsThrown(listener);
-    }
-
-    public void testWithUnsupportedLicense() {
-        AnalyticsCollectionService analyticsCollectionService = mock(AnalyticsCollectionService.class);
-
-        TransportPutAnalyticsCollectionAction transportAction = createTransportAction(
-            AnalyticsTransportActionTestUtils.mockLicenseState(false),
-            analyticsCollectionService
-        );
-        PutAnalyticsCollectionAction.Request request = mock(PutAnalyticsCollectionAction.Request.class);
-
-        ClusterState clusterState = mock(ClusterState.class);
-
-        @SuppressWarnings("unchecked")
-        ActionListener<PutAnalyticsCollectionAction.Response> listener = mock(ActionListener.class);
-
-        transportAction.masterOperation(mock(Task.class), request, clusterState, listener);
-
-        verify(analyticsCollectionService, never()).putAnalyticsCollection(any(), any(), any());
-
-        verifyNoResponseIsSent(listener);
-        verifyExceptionIsThrownOnInvalidLicence(listener);
-    }
-
-    private TransportPutAnalyticsCollectionAction createTransportAction(
-        XPackLicenseState licenseState,
-        AnalyticsCollectionService analyticsCollectionService
-    ) {
-        return new TransportPutAnalyticsCollectionAction(
-            mock(TransportService.class),
-            mock(ClusterService.class),
-            mock(ThreadPool.class),
-            mock(ActionFilters.class),
-            mock(IndexNameExpressionResolver.class),
-            analyticsCollectionService,
-            licenseState
-        );
-    }
-}

+ 10 - 24
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestDeleteSearchApplicationActionTests.java

@@ -7,37 +7,23 @@
 
 package org.elasticsearch.xpack.application.search.action;
 
-import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.client.NoOpNodeClient;
-import org.elasticsearch.test.rest.FakeRestChannel;
 import org.elasticsearch.test.rest.FakeRestRequest;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
+import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.util.Map;
 
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.mockito.Mockito.mock;
-
-public class RestDeleteSearchApplicationActionTests extends ESTestCase {
+public class RestDeleteSearchApplicationActionTests extends AbstractRestEnterpriseSearchActionTests {
     public void testWithNonCompliantLicense() throws Exception {
-        final XPackLicenseState licenseState = mock(XPackLicenseState.class);
-        final RestDeleteSearchApplicationAction action = new RestDeleteSearchApplicationAction(licenseState);
-
-        final FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(
-            Map.of("name", "my-search-application")
-        ).build();
-        final FakeRestChannel channel = new FakeRestChannel(request, true, 1);
+        checkLicenseForRequest(
+            new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(Map.of("name", "my-search-application")).build()
+        );
+    }
 
-        try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName())) {
-            action.handleRequest(request, channel, nodeClient);
-        }
-        assertThat(channel.capturedResponse(), notNullValue());
-        assertThat(channel.capturedResponse().status(), equalTo(RestStatus.FORBIDDEN));
-        assertThat(channel.capturedResponse().content().utf8ToString(), containsString("Current license is non-compliant"));
+    @Override
+    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
+        return new RestDeleteSearchApplicationAction(licenseState);
     }
 }

+ 11 - 24
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestGetSearchApplicationActionTests.java

@@ -7,37 +7,24 @@
 
 package org.elasticsearch.xpack.application.search.action;
 
-import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.client.NoOpNodeClient;
-import org.elasticsearch.test.rest.FakeRestChannel;
 import org.elasticsearch.test.rest.FakeRestRequest;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
+import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.util.Map;
 
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.mockito.Mockito.mock;
-
-public class RestGetSearchApplicationActionTests extends ESTestCase {
+public class RestGetSearchApplicationActionTests extends AbstractRestEnterpriseSearchActionTests {
     public void testWithNonCompliantLicense() throws Exception {
-        final XPackLicenseState licenseState = mock(XPackLicenseState.class);
-        final RestGetSearchApplicationAction action = new RestGetSearchApplicationAction(licenseState);
-
-        final FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(
-            Map.of("name", "my-search-application")
-        ).build();
-        final FakeRestChannel channel = new FakeRestChannel(request, true, 1);
+        checkLicenseForRequest(
+            new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(Map.of("name", "my-search-application")).build()
+        );
+    }
 
-        try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName())) {
-            action.handleRequest(request, channel, nodeClient);
-        }
-        assertThat(channel.capturedResponse(), notNullValue());
-        assertThat(channel.capturedResponse().status(), equalTo(RestStatus.FORBIDDEN));
-        assertThat(channel.capturedResponse().content().utf8ToString(), containsString("Current license is non-compliant"));
+    @Override
+    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
+        return new RestGetSearchApplicationAction(licenseState);
     }
+
 }

+ 8 - 22
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestListSearchApplicationActionTests.java

@@ -7,32 +7,18 @@
 
 package org.elasticsearch.xpack.application.search.action;
 
-import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.client.NoOpNodeClient;
-import org.elasticsearch.test.rest.FakeRestChannel;
 import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.mockito.Mockito.mock;
-
-public class RestListSearchApplicationActionTests extends ESTestCase {
+public class RestListSearchApplicationActionTests extends AbstractRestEnterpriseSearchActionTests {
     public void testWithNonCompliantLicense() throws Exception {
-        final XPackLicenseState licenseState = mock(XPackLicenseState.class);
-        final RestListSearchApplicationAction action = new RestListSearchApplicationAction(licenseState);
-
-        final FakeRestRequest request = new FakeRestRequest();
-        final FakeRestChannel channel = new FakeRestChannel(request, true, 1);
+        checkLicenseForRequest(new FakeRestRequest());
+    }
 
-        try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName())) {
-            action.handleRequest(request, channel, nodeClient);
-        }
-        assertThat(channel.capturedResponse(), notNullValue());
-        assertThat(channel.capturedResponse().status(), equalTo(RestStatus.FORBIDDEN));
-        assertThat(channel.capturedResponse().content().utf8ToString(), containsString("Current license is non-compliant"));
+    @Override
+    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
+        return new RestListSearchApplicationAction(licenseState);
     }
 }

+ 13 - 25
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestPutSearchApplicationActionTests.java

@@ -7,41 +7,29 @@
 
 package org.elasticsearch.xpack.application.search.action;
 
-import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.RestRequest;
-import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.client.NoOpNodeClient;
-import org.elasticsearch.test.rest.FakeRestChannel;
 import org.elasticsearch.test.rest.FakeRestRequest;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.XContentType;
+import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.util.Map;
 
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.mockito.Mockito.mock;
-
-public class RestPutSearchApplicationActionTests extends ESTestCase {
+public class RestPutSearchApplicationActionTests extends AbstractRestEnterpriseSearchActionTests {
     public void testWithNonCompliantLicense() throws Exception {
-        final XPackLicenseState licenseState = mock(XPackLicenseState.class);
-        final RestPutSearchApplicationAction action = new RestPutSearchApplicationAction(licenseState);
-
-        final FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.PUT)
-            .withParams(Map.of("name", "my-app"))
-            .withContent(new BytesArray("{\"indices\": [\"my-index\"]}"), XContentType.JSON)
-            .build();
-        final FakeRestChannel channel = new FakeRestChannel(request, true, 1);
+        checkLicenseForRequest(
+            new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.PUT)
+                .withParams(Map.of("name", "my-app"))
+                .withContent(new BytesArray("{\"indices\": [\"my-index\"]}"), XContentType.JSON)
+                .build()
+        );
+    }
 
-        try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName())) {
-            action.handleRequest(request, channel, nodeClient);
-        }
-        assertThat(channel.capturedResponse(), notNullValue());
-        assertThat(channel.capturedResponse().status(), equalTo(RestStatus.FORBIDDEN));
-        assertThat(channel.capturedResponse().content().utf8ToString(), containsString("Current license is non-compliant"));
+    @Override
+    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
+        return new RestPutSearchApplicationAction(licenseState);
     }
 }

+ 10 - 24
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestQuerySearchApplicationActionTests.java

@@ -7,37 +7,23 @@
 
 package org.elasticsearch.xpack.application.search.action;
 
-import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.client.NoOpNodeClient;
-import org.elasticsearch.test.rest.FakeRestChannel;
 import org.elasticsearch.test.rest.FakeRestRequest;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
+import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.util.Map;
 
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.mockito.Mockito.mock;
-
-public class RestQuerySearchApplicationActionTests extends ESTestCase {
+public class RestQuerySearchApplicationActionTests extends AbstractRestEnterpriseSearchActionTests {
     public void testWithNonCompliantLicense() throws Exception {
-        final XPackLicenseState licenseState = mock(XPackLicenseState.class);
-        final RestQuerySearchApplicationAction action = new RestQuerySearchApplicationAction(licenseState);
-
-        final FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(
-            Map.of("name", "my-search-application")
-        ).build();
-        final FakeRestChannel channel = new FakeRestChannel(request, true, 1);
+        checkLicenseForRequest(
+            new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(Map.of("name", "my-search-application")).build()
+        );
+    }
 
-        try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName())) {
-            action.handleRequest(request, channel, nodeClient);
-        }
-        assertThat(channel.capturedResponse(), notNullValue());
-        assertThat(channel.capturedResponse().status(), equalTo(RestStatus.FORBIDDEN));
-        assertThat(channel.capturedResponse().content().utf8ToString(), containsString("Current license is non-compliant"));
+    @Override
+    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
+        return new RestQuerySearchApplicationAction(licenseState);
     }
 }

+ 10 - 25
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestRenderSearchApplicationQueryActionTests.java

@@ -7,38 +7,23 @@
 
 package org.elasticsearch.xpack.application.search.action;
 
-import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.client.NoOpNodeClient;
-import org.elasticsearch.test.rest.FakeRestChannel;
 import org.elasticsearch.test.rest.FakeRestRequest;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
+import org.elasticsearch.xpack.application.AbstractRestEnterpriseSearchActionTests;
+import org.elasticsearch.xpack.application.EnterpriseSearchBaseRestHandler;
 
 import java.util.Map;
 
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.mockito.Mockito.mock;
-
-public class RestRenderSearchApplicationQueryActionTests extends ESTestCase {
+public class RestRenderSearchApplicationQueryActionTests extends AbstractRestEnterpriseSearchActionTests {
     public void testWithNonCompliantLicense() throws Exception {
-        final XPackLicenseState licenseState = mock(XPackLicenseState.class);
-        final RestRenderSearchApplicationQueryAction action = new RestRenderSearchApplicationQueryAction(licenseState);
-
-        final FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(
-            Map.of("name", "my-search-application")
-        ).build();
-
-        final FakeRestChannel channel = new FakeRestChannel(request, true, 1);
+        checkLicenseForRequest(
+            new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(Map.of("name", "my-search-application")).build()
+        );
+    }
 
-        try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName())) {
-            action.handleRequest(request, channel, nodeClient);
-        }
-        assertThat(channel.capturedResponse(), notNullValue());
-        assertThat(channel.capturedResponse().status(), equalTo(RestStatus.FORBIDDEN));
-        assertThat(channel.capturedResponse().content().utf8ToString(), containsString("Current license is non-compliant"));
+    @Override
+    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
+        return new RestRenderSearchApplicationQueryAction(licenseState);
     }
 }

+ 1 - 0
x-pack/plugin/ml/build.gradle

@@ -75,6 +75,7 @@ dependencies {
   api "org.apache.commons:commons-math3:3.6.1"
   api "com.ibm.icu:icu4j:${versions.icu4j}"
   api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}"
+  api "org.apache.lucene:lucene-analysis-kuromoji:${versions.lucene}"
   implementation 'org.ojalgo:ojalgo:51.2.0'
   nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:deps@zip") {
     changing = true

+ 1 - 0
x-pack/plugin/ml/src/main/java/module-info.java

@@ -24,6 +24,7 @@ module org.elasticsearch.ml {
     requires org.apache.logging.log4j;
     requires org.apache.lucene.core;
     requires org.apache.lucene.join;
+    requires org.apache.lucene.analysis.kuromoji;
     requires commons.math3;
     requires ojalgo;
 

+ 77 - 0
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertJapaneseTokenizer.java

@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
+
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+import java.util.SortedMap;
+
+public class BertJapaneseTokenizer extends BertTokenizer {
+    protected BertJapaneseTokenizer(
+        List<String> originalVocab,
+        SortedMap<String, Integer> vocab,
+        boolean doLowerCase,
+        boolean doTokenizeCjKChars,
+        boolean doStripAccents,
+        boolean withSpecialTokens,
+        int maxSequenceLength,
+        Set<String> neverSplit
+    ) {
+        super(originalVocab, vocab, doLowerCase, doTokenizeCjKChars, doStripAccents, withSpecialTokens, maxSequenceLength, neverSplit);
+    }
+
+    @Override
+    protected WordPieceAnalyzer createWordPieceAnalyzer(
+        List<String> vocabulary,
+        List<String> neverSplit,
+        boolean doLowerCase,
+        boolean doTokenizeCjKChars,
+        boolean doStripAccents,
+        String unknownToken
+    ) {
+        return new JapaneseWordPieceAnalyzer(vocabulary, new ArrayList<>(neverSplit), doLowerCase, doStripAccents, unknownToken);
+    }
+
+    public static Builder builder(List<String> vocab, Tokenization tokenization) {
+        return new JapaneseBuilder(vocab, tokenization);
+    }
+
+    public static class JapaneseBuilder extends BertTokenizer.Builder {
+
+        protected JapaneseBuilder(List<String> vocab, Tokenization tokenization) {
+            super(vocab, tokenization);
+        }
+
+        @Override
+        public BertTokenizer build() {
+            // if not set strip accents defaults to the value of doLowerCase
+            if (doStripAccents == null) {
+                doStripAccents = doLowerCase;
+            }
+
+            if (neverSplit == null) {
+                neverSplit = Collections.emptySet();
+            }
+
+            return new BertJapaneseTokenizer(
+                originalVocab,
+                vocab,
+                doLowerCase,
+                doTokenizeCjKChars,
+                doStripAccents,
+                withSpecialTokens,
+                maxSequenceLength,
+                neverSplit
+            );
+        }
+    }
+}

+ 19 - 1
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java

@@ -98,7 +98,7 @@ public class BertTokenizer extends NlpTokenizer {
         String maskToken,
         String unknownToken
     ) {
-        wordPieceAnalyzer = new WordPieceAnalyzer(
+        wordPieceAnalyzer = createWordPieceAnalyzer(
             originalVocab,
             new ArrayList<>(neverSplit),
             doLowerCase,
@@ -135,6 +135,24 @@ public class BertTokenizer extends NlpTokenizer {
         this.unknownToken = unknownToken;
     }
 
+    protected WordPieceAnalyzer createWordPieceAnalyzer(
+        List<String> vocabulary,
+        List<String> neverSplit,
+        boolean doLowerCase,
+        boolean doTokenizeCjKChars,
+        boolean doStripAccents,
+        String unknownToken
+    ) {
+        return new WordPieceAnalyzer(
+            vocabulary,
+            new ArrayList<>(neverSplit),
+            doLowerCase,
+            doTokenizeCjKChars,
+            doStripAccents,
+            unknownToken
+        );
+    }
+
     @Override
     int sepTokenId() {
         return sepTokenId;

+ 31 - 0
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/JapaneseWordPieceAnalyzer.java

@@ -0,0 +1,31 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
+
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.ja.JapaneseTokenizer;
+
+import java.util.List;
+
+public class JapaneseWordPieceAnalyzer extends WordPieceAnalyzer {
+
+    public JapaneseWordPieceAnalyzer(
+        List<String> vocabulary,
+        List<String> neverSplit,
+        boolean doLowerCase,
+        boolean doStripAccents,
+        String unknownToken
+    ) {
+        // For Japanese text with JapaneseTokenizer(morphological analyzer), always disable the punctuation (doTokenizeCjKChars=false)
+        super(vocabulary, neverSplit, doLowerCase, false, doStripAccents, unknownToken);
+    }
+
+    protected Tokenizer createTokenizer() {
+        return new JapaneseTokenizer(null, false, JapaneseTokenizer.Mode.SEARCH);
+    }
+}

+ 4 - 0
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java

@@ -9,6 +9,7 @@ package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
 
 import org.elasticsearch.core.Releasable;
 import org.elasticsearch.core.Strings;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertJapaneseTokenization;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.MPNetTokenization;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RobertaTokenization;
@@ -390,6 +391,9 @@ public abstract class NlpTokenizer implements Releasable {
         if (params instanceof BertTokenization) {
             return BertTokenizer.builder(vocabulary.get(), params).build();
         }
+        if (params instanceof BertJapaneseTokenization) {
+            return BertJapaneseTokenizer.builder(vocabulary.get(), params).build();
+        }
         if (params instanceof MPNetTokenization) {
             return MPNetTokenizer.mpBuilder(vocabulary.get(), params).build();
         }

+ 6 - 1
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceAnalyzer.java

@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
 
 import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 
 import java.io.IOException;
@@ -40,10 +41,14 @@ public class WordPieceAnalyzer extends Analyzer {
         this.unknownToken = unknownToken;
     }
 
+    protected Tokenizer createTokenizer() {
+        return new WhitespaceTokenizer(512);
+    }
+
     @Override
     protected TokenStreamComponents createComponents(String fieldName) {
         try {
-            WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(512);
+            Tokenizer tokenizer = createTokenizer();
             innerTokenFilter = WordPieceTokenFilter.build(
                 doLowerCase,
                 doTokenizeCjKChars,

+ 103 - 0
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertJapaneseTokenizerTests.java

@@ -0,0 +1,103 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
+
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.ja.JapaneseTokenizer;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.core.IsInstanceOf.instanceOf;
+
+public class BertJapaneseTokenizerTests extends ESTestCase {
+
+    public static final List<String> TEST_CASED_VOCAB = List.of(
+        "日本",
+        "語",
+        "使",
+        "テスト",
+        "、",
+        "。",
+        "Bert",
+        "##Japanese",
+        "##Tokenizer",
+        "Elastic",
+        "##search",
+        BertTokenizer.CLASS_TOKEN,
+        BertTokenizer.SEPARATOR_TOKEN,
+        BertTokenizer.MASK_TOKEN,
+        BertTokenizer.UNKNOWN_TOKEN,
+        BertTokenizer.PAD_TOKEN
+    );
+
+    private List<String> tokenStrings(List<? extends DelimitedToken> tokens) {
+        return tokens.stream().map(DelimitedToken::toString).collect(Collectors.toList());
+    }
+
+    public void testTokenize() {
+        try (
+            BertTokenizer tokenizer = BertJapaneseTokenizer.builder(
+                TEST_CASED_VOCAB,
+                new BertTokenization(null, false, null, Tokenization.Truncate.NONE, -1)
+            ).build()
+        ) {
+
+            String msg = "日本語で、ElasticsearchのBertJapaneseTokenizerを使うテスト。";
+            TokenizationResult.Tokens tokenization = tokenizer.tokenize(msg, Tokenization.Truncate.NONE, -1, 0).get(0);
+
+            assertThat(
+                tokenStrings(tokenization.tokens().get(0)),
+                contains(
+                    "日本",
+                    "語",
+                    BertTokenizer.UNKNOWN_TOKEN,
+                    "、",
+                    "Elastic",
+                    "##search",
+                    BertTokenizer.UNKNOWN_TOKEN,
+                    "Bert",
+                    "##Japanese",
+                    "##Tokenizer",
+                    BertTokenizer.UNKNOWN_TOKEN,
+                    BertTokenizer.UNKNOWN_TOKEN,
+                    "テスト",
+                    "。"
+                )
+            );
+            assertArrayEquals(new int[] { 0, 1, 14, 4, 9, 10, 14, 6, 7, 8, 14, 14, 3, 5 }, tokenization.tokenIds());
+            assertArrayEquals(new int[] { 0, 1, 2, 3, 4, 4, 5, 6, 6, 6, 7, 8, 9, 10 }, tokenization.tokenMap());
+        }
+    }
+
+    public void testCreateAnalyzer() {
+        try (
+            BertTokenizer tokenizer = BertJapaneseTokenizer.builder(
+                TEST_CASED_VOCAB,
+                new BertTokenization(null, false, null, Tokenization.Truncate.NONE, -1)
+            ).build()
+        ) {
+            WordPieceAnalyzer analyzer = tokenizer.createWordPieceAnalyzer(
+                TEST_CASED_VOCAB,
+                Collections.emptyList(),
+                false,
+                false,
+                false,
+                BertTokenizer.UNKNOWN_TOKEN
+            );
+            assertThat(analyzer, instanceOf(JapaneseWordPieceAnalyzer.class));
+            Tokenizer preTokenizer = analyzer.createTokenizer();
+            assertThat(preTokenizer, instanceOf(JapaneseTokenizer.class));
+        }
+    }
+}

+ 24 - 0
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java

@@ -7,6 +7,8 @@
 
 package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
 
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization;
@@ -22,6 +24,7 @@ import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.instanceOf;
 
 public class BertTokenizerTests extends ESTestCase {
 
@@ -647,4 +650,25 @@ public class BertTokenizerTests extends ESTestCase {
             assertArrayEquals(new int[] { 0, 1 }, tokenization.tokenMap());
         }
     }
+
+    public void testCreateAnalyzer() {
+        try (
+            BertTokenizer tokenizer = BertTokenizer.builder(
+                TEST_CASED_VOCAB,
+                new BertTokenization(null, false, null, Tokenization.Truncate.NONE, -1)
+            ).build()
+        ) {
+            WordPieceAnalyzer analyzer = tokenizer.createWordPieceAnalyzer(
+                TEST_CASED_VOCAB,
+                Collections.emptyList(),
+                false,
+                false,
+                false,
+                BertTokenizer.UNKNOWN_TOKEN
+            );
+            assertThat(analyzer, instanceOf(WordPieceAnalyzer.class));
+            Tokenizer preTokenizer = analyzer.createTokenizer();
+            assertThat(preTokenizer, instanceOf(WhitespaceTokenizer.class));
+        }
+    }
 }

+ 65 - 0
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizerTests.java

@@ -0,0 +1,65 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
+
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertJapaneseTokenization;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.MPNetTokenization;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RobertaTokenization;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
+import org.elasticsearch.xpack.ml.inference.nlp.Vocabulary;
+
+import java.util.List;
+
+import static org.hamcrest.core.IsInstanceOf.instanceOf;
+
+public class NlpTokenizerTests extends ESTestCase {
+
+    public static final List<String> BERT_REQUIRED_VOCAB = List.of(
+        BertTokenizer.CLASS_TOKEN,
+        BertTokenizer.SEPARATOR_TOKEN,
+        BertTokenizer.MASK_TOKEN,
+        BertTokenizer.UNKNOWN_TOKEN,
+        BertTokenizer.PAD_TOKEN
+    );
+    public static final List<String> MPNET_REQUIRED_VOCAB = List.of(
+        MPNetTokenizer.UNKNOWN_TOKEN,
+        MPNetTokenizer.SEPARATOR_TOKEN,
+        MPNetTokenizer.PAD_TOKEN,
+        MPNetTokenizer.CLASS_TOKEN,
+        MPNetTokenizer.MASK_TOKEN
+    );
+    public static final List<String> ROBERTA_REQUIRED_VOCAB = List.of(
+        RobertaTokenizer.UNKNOWN_TOKEN,
+        RobertaTokenizer.SEPARATOR_TOKEN,
+        RobertaTokenizer.PAD_TOKEN,
+        RobertaTokenizer.CLASS_TOKEN,
+        RobertaTokenizer.MASK_TOKEN
+    );
+
+    void validateBuilder(List<String> vocab, Tokenization tokenization, Class<?> expectedClass) {
+        Vocabulary vocabulary = new Vocabulary(vocab, "model-name", null);
+        NlpTokenizer tokenizer = NlpTokenizer.build(vocabulary, tokenization);
+        assertThat(tokenizer, instanceOf(expectedClass));
+    }
+
+    public void testBuildTokenizer() {
+        Tokenization bert = new BertTokenization(null, false, null, Tokenization.Truncate.NONE, -1);
+        validateBuilder(BERT_REQUIRED_VOCAB, bert, BertTokenizer.class);
+
+        Tokenization bertjp = new BertJapaneseTokenization(null, false, null, Tokenization.Truncate.NONE, -1);
+        validateBuilder(BERT_REQUIRED_VOCAB, bertjp, BertJapaneseTokenizer.class);
+
+        Tokenization mpnet = new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE, -1);
+        validateBuilder(MPNET_REQUIRED_VOCAB, mpnet, MPNetTokenizer.class);
+
+        Tokenization roberta = new RobertaTokenization(null, false, null, Tokenization.Truncate.NONE, -1);
+        validateBuilder(ROBERTA_REQUIRED_VOCAB, roberta, RobertaTokenizer.class);
+    }
+}

+ 32 - 3
x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingIndexTemplateRegistry.java

@@ -7,6 +7,8 @@
 
 package org.elasticsearch.xpack.profiler;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.ComponentTemplate;
@@ -27,12 +29,14 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 /**
  * Creates all index-templates and ILM policies that are required for using Elastic Universal Profiling.
  */
 public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry {
+    private static final Logger logger = LogManager.getLogger(ProfilingIndexTemplateRegistry.class);
     // history (please add a comment why you increased the version here)
     // version 1: initial
     public static final int INDEX_TEMPLATE_VERSION = 1;
@@ -70,9 +74,11 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry {
     }
 
     private static final List<LifecyclePolicy> LIFECYCLE_POLICIES = List.of(
-        new LifecyclePolicyConfig("profiling", "/org/elasticsearch/xpack/profiler/ilm-policy/profiling-60-days.json").load(
-            LifecyclePolicyConfig.DEFAULT_X_CONTENT_REGISTRY
-        )
+        new LifecyclePolicyConfig(
+            "profiling",
+            "/org/elasticsearch/xpack/profiler/ilm-policy/profiling-60-days.json",
+            Map.of(PROFILING_TEMPLATE_VERSION_VARIABLE, String.valueOf(INDEX_TEMPLATE_VERSION))
+        ).load(LifecyclePolicyConfig.DEFAULT_X_CONTENT_REGISTRY)
     );
 
     @Override
@@ -214,6 +220,29 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry {
         return templatesEnabled ? COMPOSABLE_INDEX_TEMPLATE_CONFIGS : Collections.emptyMap();
     }
 
+    @Override
+    protected boolean isUpgradeRequired(LifecyclePolicy currentPolicy, LifecyclePolicy newPolicy) {
+        try {
+            return getVersion(currentPolicy, "current") < getVersion(newPolicy, "new");
+        } catch (IllegalArgumentException ex) {
+            logger.warn("Cannot determine whether lifecycle policy upgrade is required.", ex);
+            // don't attempt an upgrade on invalid data
+            return false;
+        }
+    }
+
+    private int getVersion(LifecyclePolicy policy, String logicalVersion) {
+        Map<String, Object> meta = policy.getMetadata();
+        try {
+            return meta != null ? Integer.parseInt(meta.getOrDefault("version", Integer.MIN_VALUE).toString()) : Integer.MIN_VALUE;
+        } catch (NumberFormatException ex) {
+            throw new IllegalArgumentException(
+                String.format(Locale.ROOT, "Invalid version metadata for %s lifecycle policy [%s]", logicalVersion, policy.getName()),
+                ex
+            );
+        }
+    }
+
     public static boolean areAllTemplatesCreated(ClusterState state) {
         for (String componentTemplate : COMPONENT_TEMPLATE_CONFIGS.keySet()) {
             if (state.metadata().componentTemplates().containsKey(componentTemplate) == false) {

+ 81 - 10
x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingIndexTemplateRegistryTests.java

@@ -47,9 +47,9 @@ import org.junit.After;
 import org.junit.Before;
 
 import java.io.IOException;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
@@ -93,7 +93,7 @@ public class ProfilingIndexTemplateRegistryTests extends ESTestCase {
             return null;
         });
 
-        ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes);
+        ClusterChangedEvent event = createClusterChangedEvent(Map.of(), Map.of(), nodes);
         registry.clusterChanged(event);
     }
 
@@ -106,7 +106,7 @@ public class ProfilingIndexTemplateRegistryTests extends ESTestCase {
         for (String templateName : registry.getComponentTemplateConfigs().keySet()) {
             componentTemplates.put(templateName, ProfilingIndexTemplateRegistry.INDEX_TEMPLATE_VERSION);
         }
-        ClusterChangedEvent event = createClusterChangedEvent(componentTemplates, Collections.emptyMap(), nodes);
+        ClusterChangedEvent event = createClusterChangedEvent(componentTemplates, Map.of(), nodes);
 
         AtomicInteger calledTimes = new AtomicInteger(0);
 
@@ -122,13 +122,13 @@ public class ProfilingIndexTemplateRegistryTests extends ESTestCase {
         // will not be issued anymore, leaving calledTimes to 0
         assertBusy(() -> {
             // now delete one template from the cluster state and lets retry
-            ClusterChangedEvent newEvent = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes);
+            ClusterChangedEvent newEvent = createClusterChangedEvent(Map.of(), Map.of(), nodes);
             registry.clusterChanged(newEvent);
             assertThat(calledTimes.get(), greaterThan(1));
         });
     }
 
-    public void testThatNonExistingPoliciesAreAddedImmediately() {
+    public void testThatNonExistingPoliciesAreAddedImmediately() throws Exception {
         DiscoveryNode node = DiscoveryNodeUtils.create("node");
         DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
 
@@ -153,6 +153,10 @@ public class ProfilingIndexTemplateRegistryTests extends ESTestCase {
                 return null;
             }
         });
+
+        ClusterChangedEvent newEvent = createClusterChangedEvent(Map.of(), Map.of(), Map.of(), nodes);
+        registry.clusterChanged(newEvent);
+        assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getPolicyConfigs().size())));
     }
 
     public void testPolicyAlreadyExists() {
@@ -182,7 +186,7 @@ public class ProfilingIndexTemplateRegistryTests extends ESTestCase {
             return null;
         });
 
-        ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), policyMap, nodes);
+        ClusterChangedEvent event = createClusterChangedEvent(Map.of(), Map.of(), policyMap, nodes);
         registry.clusterChanged(event);
     }
 
@@ -191,7 +195,11 @@ public class ProfilingIndexTemplateRegistryTests extends ESTestCase {
         DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
 
         Map<String, LifecyclePolicy> policyMap = new HashMap<>();
-        String policyStr = "{\"phases\":{\"delete\":{\"min_age\":\"1m\",\"actions\":{\"delete\":{}}}}}";
+        String policyStr = String.format(
+            Locale.ROOT,
+            "{\"_meta\":{\"version\":%d},\"phases\":{\"delete\":{\"min_age\":\"1m\",\"actions\":{\"delete\":{}}}}}",
+            ProfilingIndexTemplateRegistry.INDEX_TEMPLATE_VERSION
+        );
         List<LifecyclePolicy> policies = registry.getPolicyConfigs();
         assertThat(policies, hasSize(1));
         policies.forEach(p -> policyMap.put(p.getName(), p));
@@ -233,8 +241,71 @@ public class ProfilingIndexTemplateRegistryTests extends ESTestCase {
         ) {
             LifecyclePolicy different = LifecyclePolicy.parse(parser, policies.get(0).getName());
             policyMap.put(policies.get(0).getName(), different);
-            ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), policyMap, nodes);
+            ClusterChangedEvent event = createClusterChangedEvent(Map.of(), Map.of(), policyMap, nodes);
+            registry.clusterChanged(event);
+        }
+    }
+
+    public void testPolicyUpgraded() throws Exception {
+        DiscoveryNode node = DiscoveryNodeUtils.create("node");
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
+
+        Map<String, LifecyclePolicy> policyMap = new HashMap<>();
+        // set version to 0 to force an upgrade (proper versions start at 1)
+        String priorPolicyStr = "{\"_meta\":{\"version\":0},\"phases\":{\"delete\":{\"min_age\":\"1m\",\"actions\":{\"delete\":{}}}}}";
+        List<LifecyclePolicy> policies = registry.getPolicyConfigs();
+        assertThat(policies, hasSize(1));
+        policies.forEach(p -> policyMap.put(p.getName(), p));
+
+        AtomicInteger calledTimes = new AtomicInteger(0);
+        client.setVerifier((action, request, listener) -> {
+            if (action instanceof PutComponentTemplateAction) {
+                // Ignore this, it's verified in another test
+                return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutComposableIndexTemplateAction) {
+                // Ignore this, it's verified in another test
+                return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutIndexTemplateAction) {
+                // Ignore this, it's verified in another test
+                return AcknowledgedResponse.TRUE;
+            } else if (action instanceof PutLifecycleAction) {
+                calledTimes.incrementAndGet();
+                assertThat(action, instanceOf(PutLifecycleAction.class));
+                assertThat(request, instanceOf(PutLifecycleAction.Request.class));
+                final PutLifecycleAction.Request putRequest = (PutLifecycleAction.Request) request;
+                assertThat(putRequest.getPolicy().getName(), equalTo("profiling"));
+                assertNotNull(listener);
+                return AcknowledgedResponse.TRUE;
+
+            } else {
+                fail("client called with unexpected request: " + request.toString());
+            }
+            return null;
+        });
+
+        try (
+            XContentParser parser = XContentType.JSON.xContent()
+                .createParser(
+                    XContentParserConfiguration.EMPTY.withRegistry(
+                        new NamedXContentRegistry(
+                            List.of(
+                                new NamedXContentRegistry.Entry(
+                                    LifecycleAction.class,
+                                    new ParseField(DeleteAction.NAME),
+                                    DeleteAction::parse
+                                )
+                            )
+                        )
+                    ),
+                    priorPolicyStr
+                )
+        ) {
+            LifecyclePolicy priorPolicy = LifecyclePolicy.parse(parser, policies.get(0).getName());
+            policyMap.put(policies.get(0).getName(), priorPolicy);
+            ClusterChangedEvent event = createClusterChangedEvent(Map.of(), Map.of(), policyMap, nodes);
             registry.clusterChanged(event);
+            // we've changed one policy that should be upgraded
+            assertBusy(() -> assertThat(calledTimes.get(), equalTo(1)));
         }
     }
 
@@ -272,7 +343,7 @@ public class ProfilingIndexTemplateRegistryTests extends ESTestCase {
         Map<String, Integer> existingComposableTemplates,
         DiscoveryNodes nodes
     ) {
-        return createClusterChangedEvent(existingComponentTemplates, existingComposableTemplates, Collections.emptyMap(), nodes);
+        return createClusterChangedEvent(existingComponentTemplates, existingComposableTemplates, Map.of(), nodes);
     }
 
     private ClusterChangedEvent createClusterChangedEvent(
@@ -321,7 +392,7 @@ public class ProfilingIndexTemplateRegistryTests extends ESTestCase {
 
         Map<String, LifecyclePolicyMetadata> existingILMMeta = existingPolicies.entrySet()
             .stream()
-            .collect(Collectors.toMap(Map.Entry::getKey, e -> new LifecyclePolicyMetadata(e.getValue(), Collections.emptyMap(), 1, 1)));
+            .collect(Collectors.toMap(Map.Entry::getKey, e -> new LifecyclePolicyMetadata(e.getValue(), Map.of(), 1, 1)));
         IndexLifecycleMetadata ilmMeta = new IndexLifecycleMetadata(existingILMMeta, OperationMode.RUNNING);
 
         return ClusterState.builder(new ClusterName("test"))

+ 23 - 0
x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml

@@ -553,3 +553,26 @@ setup:
               "ner": { }
             }
           }
+
+---
+"Test put model config with Japanese tokenizer":
+  - do:
+      ml.put_trained_model:
+        model_id: j_bert
+        body: >
+          {
+            "description": "model config with Japanese tokenizer",
+            "model_type": "pytorch",
+            "inference_config": {
+              "pass_through": {
+                "tokenization": {
+                  "bert_ja": {
+                    "with_special_tokens": false,
+                    "max_sequence_length": 512,
+                    "truncate": "first",
+                    "span": -1
+                  }
+                }
+              }
+            }
+          }