|
@@ -51,12 +51,16 @@ import org.elasticsearch.search.fetch.FetchSearchResult;
|
|
|
import org.elasticsearch.search.internal.InternalSearchResponse;
|
|
|
import org.elasticsearch.search.internal.SearchContext;
|
|
|
import org.elasticsearch.search.query.QuerySearchResult;
|
|
|
+import org.elasticsearch.search.suggest.SortBy;
|
|
|
import org.elasticsearch.search.suggest.Suggest;
|
|
|
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
|
|
|
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestion;
|
|
|
+import org.elasticsearch.search.suggest.term.TermSuggestion;
|
|
|
import org.elasticsearch.test.ESTestCase;
|
|
|
import org.junit.Before;
|
|
|
|
|
|
import java.util.ArrayList;
|
|
|
+import java.util.Collection;
|
|
|
import java.util.Collections;
|
|
|
import java.util.HashMap;
|
|
|
import java.util.List;
|
|
@@ -87,7 +91,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
|
|
|
});
|
|
|
}
|
|
|
|
|
|
- public void testSort() {
|
|
|
+ public void testSortDocs() {
|
|
|
List<CompletionSuggestion> suggestions = new ArrayList<>();
|
|
|
for (int i = 0; i < randomIntBetween(1, 5); i++) {
|
|
|
suggestions.add(new CompletionSuggestion(randomAlphaOfLength(randomIntBetween(1, 5)), randomIntBetween(1, 20), false));
|
|
@@ -102,16 +106,18 @@ public class SearchPhaseControllerTests extends ESTestCase {
|
|
|
size = first.get().queryResult().size();
|
|
|
}
|
|
|
int accumulatedLength = Math.min(queryResultSize, getTotalQueryHits(results));
|
|
|
- ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(true, results.asList(), null,
|
|
|
- new SearchPhaseController.TopDocsStats(SearchContext.TRACK_TOTAL_HITS_ACCURATE), from, size).scoreDocs;
|
|
|
- for (Suggest.Suggestion<?> suggestion : reducedSuggest(results)) {
|
|
|
+ List<CompletionSuggestion> reducedCompletionSuggestions = reducedSuggest(results);
|
|
|
+ for (Suggest.Suggestion<?> suggestion : reducedCompletionSuggestions) {
|
|
|
int suggestionSize = suggestion.getEntries().get(0).getOptions().size();
|
|
|
accumulatedLength += suggestionSize;
|
|
|
}
|
|
|
+ ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(true, results.asList(), null,
|
|
|
+ new SearchPhaseController.TopDocsStats(SearchContext.TRACK_TOTAL_HITS_ACCURATE), from, size,
|
|
|
+ reducedCompletionSuggestions).scoreDocs;
|
|
|
assertThat(sortedDocs.length, equalTo(accumulatedLength));
|
|
|
}
|
|
|
|
|
|
- public void testSortIsIdempotent() throws Exception {
|
|
|
+ public void testSortDocsIsIdempotent() throws Exception {
|
|
|
int nShards = randomIntBetween(1, 20);
|
|
|
int queryResultSize = randomBoolean() ? 0 : randomIntBetween(1, nShards * 2);
|
|
|
long randomSeed = randomLong();
|
|
@@ -126,12 +132,14 @@ public class SearchPhaseControllerTests extends ESTestCase {
|
|
|
size = first.get().queryResult().size();
|
|
|
}
|
|
|
SearchPhaseController.TopDocsStats topDocsStats = new SearchPhaseController.TopDocsStats(SearchContext.TRACK_TOTAL_HITS_ACCURATE);
|
|
|
- ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(ignoreFrom, results.asList(), null, topDocsStats, from, size).scoreDocs;
|
|
|
+ ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(ignoreFrom, results.asList(), null, topDocsStats, from, size,
|
|
|
+ Collections.emptyList()).scoreDocs;
|
|
|
|
|
|
results = generateSeededQueryResults(randomSeed, nShards, Collections.emptyList(), queryResultSize,
|
|
|
useConstantScore);
|
|
|
SearchPhaseController.TopDocsStats topDocsStats2 = new SearchPhaseController.TopDocsStats(SearchContext.TRACK_TOTAL_HITS_ACCURATE);
|
|
|
- ScoreDoc[] sortedDocs2 = SearchPhaseController.sortDocs(ignoreFrom, results.asList(), null, topDocsStats2, from, size).scoreDocs;
|
|
|
+ ScoreDoc[] sortedDocs2 = SearchPhaseController.sortDocs(ignoreFrom, results.asList(), null, topDocsStats2, from, size,
|
|
|
+ Collections.emptyList()).scoreDocs;
|
|
|
assertEquals(sortedDocs.length, sortedDocs2.length);
|
|
|
for (int i = 0; i < sortedDocs.length; i++) {
|
|
|
assertEquals(sortedDocs[i].doc, sortedDocs2[i].doc);
|
|
@@ -204,9 +212,16 @@ public class SearchPhaseControllerTests extends ESTestCase {
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- private static AtomicArray<SearchPhaseResult> generateQueryResults(int nShards,
|
|
|
- List<CompletionSuggestion> suggestions,
|
|
|
- int searchHitsSize, boolean useConstantScore) {
|
|
|
+ /**
|
|
|
+ * Generate random query results received from the provided number of shards, including the provided
|
|
|
+ * number of search hits and randomly generated completion suggestions based on the name and size of the provided ones.
|
|
|
+ * Note that <code>shardIndex</code> is already set on the generated completion suggestions to simulate what
|
|
|
+ * {@link SearchPhaseController#reducedQueryPhase(Collection, boolean, int, boolean)} does, meaning that the returned query results
|
|
|
+ * can be fed directly to
|
|
|
+ * {@link SearchPhaseController#sortDocs(boolean, Collection, Collection, SearchPhaseController.TopDocsStats, int, int, List)}
|
|
|
+ */
|
|
|
+ private static AtomicArray<SearchPhaseResult> generateQueryResults(int nShards, List<CompletionSuggestion> suggestions,
|
|
|
+ int searchHitsSize, boolean useConstantScore) {
|
|
|
AtomicArray<SearchPhaseResult> queryResults = new AtomicArray<>(nShards);
|
|
|
for (int shardIndex = 0; shardIndex < nShards; shardIndex++) {
|
|
|
String clusterAlias = randomBoolean() ? null : "remote";
|
|
@@ -265,17 +280,17 @@ public class SearchPhaseControllerTests extends ESTestCase {
|
|
|
return resultCount;
|
|
|
}
|
|
|
|
|
|
- private static Suggest reducedSuggest(AtomicArray<SearchPhaseResult> results) {
|
|
|
+ private static List<CompletionSuggestion> reducedSuggest(AtomicArray<SearchPhaseResult> results) {
|
|
|
Map<String, List<Suggest.Suggestion<CompletionSuggestion.Entry>>> groupedSuggestion = new HashMap<>();
|
|
|
for (SearchPhaseResult entry : results.asList()) {
|
|
|
for (Suggest.Suggestion<?> suggestion : entry.queryResult().suggest()) {
|
|
|
List<Suggest.Suggestion<CompletionSuggestion.Entry>> suggests =
|
|
|
groupedSuggestion.computeIfAbsent(suggestion.getName(), s -> new ArrayList<>());
|
|
|
- suggests.add((Suggest.Suggestion<CompletionSuggestion.Entry>) suggestion);
|
|
|
+ suggests.add((CompletionSuggestion) suggestion);
|
|
|
}
|
|
|
}
|
|
|
- return new Suggest(groupedSuggestion.values().stream().map(CompletionSuggestion::reduceTo)
|
|
|
- .collect(Collectors.toList()));
|
|
|
+ CompletionSuggestion completionSuggestion = new CompletionSuggestion(null, -1, randomBoolean());
|
|
|
+ return groupedSuggestion.values().stream().map(completionSuggestion::reduce).collect(Collectors.toList());
|
|
|
}
|
|
|
|
|
|
private static AtomicArray<SearchPhaseResult> generateFetchResults(int nShards, ScoreDoc[] mergedSearchDocs, Suggest mergedSuggest) {
|
|
@@ -645,4 +660,96 @@ public class SearchPhaseControllerTests extends ESTestCase {
|
|
|
assertEquals("field", reduce.sortedTopDocs.collapseField);
|
|
|
assertArrayEquals(collapseValues, reduce.sortedTopDocs.collapseValues);
|
|
|
}
|
|
|
+
|
|
|
+ public void testConsumerSuggestions() {
|
|
|
+ int expectedNumResults = randomIntBetween(1, 100);
|
|
|
+ int bufferSize = randomIntBetween(2, 200);
|
|
|
+ SearchRequest request = randomSearchRequest();
|
|
|
+ request.setBatchedReduceSize(bufferSize);
|
|
|
+ InitialSearchPhase.ArraySearchPhaseResults<SearchPhaseResult> consumer =
|
|
|
+ searchPhaseController.newSearchPhaseResults(request, expectedNumResults);
|
|
|
+ int maxScoreTerm = -1;
|
|
|
+ int maxScorePhrase = -1;
|
|
|
+ int maxScoreCompletion = -1;
|
|
|
+ for (int i = 0; i < expectedNumResults; i++) {
|
|
|
+ QuerySearchResult result = new QuerySearchResult(i, new SearchShardTarget("node", new ShardId("a", "b", i),
|
|
|
+ null, OriginalIndices.NONE));
|
|
|
+ List<Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>> suggestions =
|
|
|
+ new ArrayList<>();
|
|
|
+ {
|
|
|
+ TermSuggestion termSuggestion = new TermSuggestion("term", 1, SortBy.SCORE);
|
|
|
+ TermSuggestion.Entry entry = new TermSuggestion.Entry(new Text("entry"), 0, 10);
|
|
|
+ int numOptions = randomIntBetween(1, 10);
|
|
|
+ for (int j = 0; j < numOptions; j++) {
|
|
|
+ int score = numOptions - j;
|
|
|
+ maxScoreTerm = Math.max(maxScoreTerm, score);
|
|
|
+ entry.addOption(new TermSuggestion.Entry.Option(new Text("option"), randomInt(), score));
|
|
|
+ }
|
|
|
+ termSuggestion.addTerm(entry);
|
|
|
+ suggestions.add(termSuggestion);
|
|
|
+ }
|
|
|
+ {
|
|
|
+ PhraseSuggestion phraseSuggestion = new PhraseSuggestion("phrase", 1);
|
|
|
+ PhraseSuggestion.Entry entry = new PhraseSuggestion.Entry(new Text("entry"), 0, 10);
|
|
|
+ int numOptions = randomIntBetween(1, 10);
|
|
|
+ for (int j = 0; j < numOptions; j++) {
|
|
|
+ int score = numOptions - j;
|
|
|
+ maxScorePhrase = Math.max(maxScorePhrase, score);
|
|
|
+ entry.addOption(new PhraseSuggestion.Entry.Option(new Text("option"), new Text("option"), score));
|
|
|
+ }
|
|
|
+ phraseSuggestion.addTerm(entry);
|
|
|
+ suggestions.add(phraseSuggestion);
|
|
|
+ }
|
|
|
+ {
|
|
|
+ CompletionSuggestion completionSuggestion = new CompletionSuggestion("completion", 1, false);
|
|
|
+ CompletionSuggestion.Entry entry = new CompletionSuggestion.Entry(new Text("entry"), 0, 10);
|
|
|
+ int numOptions = randomIntBetween(1, 10);
|
|
|
+ for (int j = 0; j < numOptions; j++) {
|
|
|
+ int score = numOptions - j;
|
|
|
+ maxScoreCompletion = Math.max(maxScoreCompletion, score);
|
|
|
+ CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option(j,
|
|
|
+ new Text("option"), score, Collections.emptyMap());
|
|
|
+ entry.addOption(option);
|
|
|
+ }
|
|
|
+ completionSuggestion.addTerm(entry);
|
|
|
+ suggestions.add(completionSuggestion);
|
|
|
+ }
|
|
|
+ result.suggest(new Suggest(suggestions));
|
|
|
+ result.topDocs(new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN), new DocValueFormat[0]);
|
|
|
+ result.setShardIndex(i);
|
|
|
+ result.size(0);
|
|
|
+ consumer.consumeResult(result);
|
|
|
+ }
|
|
|
+ SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce();
|
|
|
+ assertEquals(3, reduce.suggest.size());
|
|
|
+ {
|
|
|
+ TermSuggestion term = reduce.suggest.getSuggestion("term");
|
|
|
+ assertEquals(1, term.getEntries().size());
|
|
|
+ assertEquals(1, term.getEntries().get(0).getOptions().size());
|
|
|
+ assertEquals(maxScoreTerm, term.getEntries().get(0).getOptions().get(0).getScore(), 0f);
|
|
|
+ }
|
|
|
+ {
|
|
|
+ PhraseSuggestion phrase = reduce.suggest.getSuggestion("phrase");
|
|
|
+ assertEquals(1, phrase.getEntries().size());
|
|
|
+ assertEquals(1, phrase.getEntries().get(0).getOptions().size());
|
|
|
+ assertEquals(maxScorePhrase, phrase.getEntries().get(0).getOptions().get(0).getScore(), 0f);
|
|
|
+ }
|
|
|
+ {
|
|
|
+ CompletionSuggestion completion = reduce.suggest.getSuggestion("completion");
|
|
|
+ assertEquals(1, completion.getSize());
|
|
|
+ assertEquals(1, completion.getOptions().size());
|
|
|
+ CompletionSuggestion.Entry.Option option = completion.getOptions().get(0);
|
|
|
+ assertEquals(maxScoreCompletion, option.getScore(), 0f);
|
|
|
+ }
|
|
|
+ assertFinalReduction(request);
|
|
|
+ assertEquals(1, reduce.sortedTopDocs.scoreDocs.length);
|
|
|
+ assertEquals(maxScoreCompletion, reduce.sortedTopDocs.scoreDocs[0].score, 0f);
|
|
|
+ assertEquals(0, reduce.sortedTopDocs.scoreDocs[0].doc);
|
|
|
+ assertNotEquals(-1, reduce.sortedTopDocs.scoreDocs[0].shardIndex);
|
|
|
+ assertEquals(0, reduce.totalHits.value);
|
|
|
+ assertFalse(reduce.sortedTopDocs.isSortedByField);
|
|
|
+ assertNull(reduce.sortedTopDocs.sortFields);
|
|
|
+ assertNull(reduce.sortedTopDocs.collapseField);
|
|
|
+ assertNull(reduce.sortedTopDocs.collapseValues);
|
|
|
+ }
|
|
|
}
|