|
@@ -490,20 +490,6 @@ public class JobResultsProvider {
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- private <T, U> T parseGetHit(GetResponse getResponse, BiFunction<XContentParser, U, T> objectParser,
|
|
|
- Consumer<Exception> errorHandler) {
|
|
|
- BytesReference source = getResponse.getSourceAsBytesRef();
|
|
|
-
|
|
|
- try (InputStream stream = source.streamInput();
|
|
|
- XContentParser parser = XContentFactory.xContent(XContentType.JSON)
|
|
|
- .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
|
|
|
- return objectParser.apply(parser, null);
|
|
|
- } catch (IOException e) {
|
|
|
- errorHandler.accept(new ElasticsearchParseException("failed to parse " + getResponse.getType(), e));
|
|
|
- return null;
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
/**
|
|
|
* Search for buckets with the parameters in the {@link BucketsQueryBuilder}
|
|
|
* Uses the internal client, so runs as the _xpack user
|
|
@@ -957,19 +943,6 @@ public class JobResultsProvider {
|
|
|
), client::search);
|
|
|
}
|
|
|
|
|
|
- private <U, T> void getResult(String jobId, String resultDescription, GetRequest get, BiFunction<XContentParser, U, T> objectParser,
|
|
|
- Consumer<Result<T>> handler, Consumer<Exception> errorHandler, Supplier<T> notFoundSupplier) {
|
|
|
-
|
|
|
- executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, get, ActionListener.<GetResponse>wrap(getDocResponse -> {
|
|
|
- if (getDocResponse.isExists()) {
|
|
|
- handler.accept(new Result<>(getDocResponse.getIndex(), parseGetHit(getDocResponse, objectParser, errorHandler)));
|
|
|
- } else {
|
|
|
- LOGGER.trace("No {} for job with id {}", resultDescription, jobId);
|
|
|
- handler.accept(new Result<>(null, notFoundSupplier.get()));
|
|
|
- }
|
|
|
- }, errorHandler), client::get);
|
|
|
- }
|
|
|
-
|
|
|
private SearchRequestBuilder createLatestModelSizeStatsSearch(String indexName) {
|
|
|
return client.prepareSearch(indexName)
|
|
|
.setSize(1)
|
|
@@ -1115,11 +1088,14 @@ public class JobResultsProvider {
|
|
|
public void getForecastRequestStats(String jobId, String forecastId, Consumer<ForecastRequestStats> handler,
|
|
|
Consumer<Exception> errorHandler) {
|
|
|
String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId);
|
|
|
- GetRequest getRequest = new GetRequest(indexName, ElasticsearchMappings.DOC_TYPE,
|
|
|
- ForecastRequestStats.documentId(jobId, forecastId));
|
|
|
-
|
|
|
- getResult(jobId, ForecastRequestStats.RESULTS_FIELD.getPreferredName(), getRequest, ForecastRequestStats.LENIENT_PARSER,
|
|
|
- result -> handler.accept(result.result), errorHandler, () -> null);
|
|
|
+ SearchRequestBuilder forecastSearch = client.prepareSearch(indexName)
|
|
|
+ .setQuery(QueryBuilders.idsQuery().addIds(ForecastRequestStats.documentId(jobId, forecastId)));
|
|
|
+
|
|
|
+ searchSingleResult(jobId,
|
|
|
+ ForecastRequestStats.RESULTS_FIELD.getPreferredName(),
|
|
|
+ forecastSearch,
|
|
|
+ ForecastRequestStats.LENIENT_PARSER, result -> handler.accept(result.result),
|
|
|
+ errorHandler, () -> null);
|
|
|
}
|
|
|
|
|
|
public void getForecastStats(String jobId, Consumer<ForecastStats> handler, Consumer<Exception> errorHandler) {
|