浏览代码

Added facet support to the percolate api.
Closes #3851

Martijn van Groningen 12 年之前
父节点
当前提交
8d49aa398f

+ 53 - 20
src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java

@@ -30,6 +30,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.FilterBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.search.facet.FacetBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 
 import java.util.Map;
@@ -104,16 +105,68 @@ public class PercolateRequestBuilder extends BroadcastOperationRequestBuilder<Pe
         return this;
     }
 
+    /**
+     * Similar to {@link #setScore(boolean)}, but also sorts the matches by score.
+     */
     public PercolateRequestBuilder setSort(boolean sort) {
         sourceBuilder().setSort(sort);
         return this;
     }
 
+    /**
+     * Whether to compute a score for each match and include it in the response. The score is based on
+     * {@link #setPercolateQuery(QueryBuilder)}.
+     */
     public PercolateRequestBuilder setScore(boolean score) {
         sourceBuilder().setScore(score);
         return this;
     }
 
+    /**
+     * Sets the document to be percolated; the registered percolate queries are evaluated against this document.
+     */
+    public PercolateRequestBuilder setPercolateDoc(PercolateSourceBuilder.DocBuilder docBuilder) {
+        sourceBuilder().setDoc(docBuilder);
+        return this;
+    }
+
+    /**
+     * Sets a query to reduce the number of percolate queries to be evaluated and score the queries that match based
+     * on this query.
+     */
+    public PercolateRequestBuilder setPercolateQuery(QueryBuilder queryBuilder) {
+        sourceBuilder().setQueryBuilder(queryBuilder);
+        return this;
+    }
+
+    /**
+     * Sets a filter to reduce the number of percolate queries to be evaluated.
+     */
+    public PercolateRequestBuilder setPercolateFilter(FilterBuilder filterBuilder) {
+        sourceBuilder().setFilterBuilder(filterBuilder);
+        return this;
+    }
+
+    /**
+     * Enables highlighting for the percolate document. The percolate document is highlighted once per matched percolate query.
+     */
+    public PercolateRequestBuilder setHighlightBuilder(HighlightBuilder highlightBuilder) {
+        sourceBuilder().setHighlightBuilder(highlightBuilder);
+        return this;
+    }
+
+    /**
+     * Add a facet definition.
+     */
+    public PercolateRequestBuilder addFacet(FacetBuilder facetBuilder) {
+        sourceBuilder().addFacet(facetBuilder);
+        return this;
+    }
+
+    /**
+     * Sets the raw percolate request body.
+     */
     public PercolateRequestBuilder setSource(PercolateSourceBuilder source) {
         sourceBuilder = source;
         return this;
@@ -164,26 +217,6 @@ public class PercolateRequestBuilder extends BroadcastOperationRequestBuilder<Pe
         return this;
     }
 
-    public PercolateRequestBuilder setPercolateDoc(PercolateSourceBuilder.DocBuilder docBuilder) {
-        sourceBuilder().setDoc(docBuilder);
-        return this;
-    }
-
-    public PercolateRequestBuilder setPercolateQuery(QueryBuilder queryBuilder) {
-        sourceBuilder().setQueryBuilder(queryBuilder);
-        return this;
-    }
-
-    public PercolateRequestBuilder setPercolateFilter(FilterBuilder filterBuilder) {
-        sourceBuilder().setFilterBuilder(filterBuilder);
-        return this;
-    }
-
-    public PercolateRequestBuilder setHighlightBuilder(HighlightBuilder highlightBuilder) {
-        sourceBuilder().setHighlightBuilder(highlightBuilder);
-        return this;
-    }
-
     private PercolateSourceBuilder sourceBuilder() {
         if (sourceBuilder == null) {
             sourceBuilder = new PercolateSourceBuilder();

+ 10 - 8
src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java

@@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentBuilderString;
 import org.elasticsearch.percolator.PercolatorService;
 import org.elasticsearch.rest.action.support.RestActions;
+import org.elasticsearch.search.facet.InternalFacets;
 import org.elasticsearch.search.highlight.HighlightField;
 
 import java.io.IOException;
@@ -46,20 +47,15 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
     private long tookInMillis;
     private Match[] matches;
     private long count;
+    private InternalFacets facets;
 
     public PercolateResponse(int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures,
-                             Match[] matches, long count, long tookInMillis) {
+                             Match[] matches, long count, long tookInMillis, InternalFacets facets) {
         super(totalShards, successfulShards, failedShards, shardFailures);
         this.tookInMillis = tookInMillis;
         this.matches = matches;
         this.count = count;
-    }
-
-    public PercolateResponse(int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures, long count, long tookInMillis) {
-        super(totalShards, successfulShards, failedShards, shardFailures);
-        this.tookInMillis = tookInMillis;
-        this.matches = EMPTY;
-        this.count = count;
+        this.facets = facets;
     }
 
     public PercolateResponse(int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures, long tookInMillis) {
@@ -97,6 +93,10 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
         return count;
     }
 
+    public InternalFacets getFacets() {
+        return facets;
+    }
+
     @Override
     public Iterator<Match> iterator() {
         return Arrays.asList(matches).iterator();
@@ -163,6 +163,7 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
             matches[i] = new Match();
             matches[i].readFrom(in);
         }
+        facets = InternalFacets.readOptionalFacets(in);
     }
 
     @Override
@@ -174,6 +175,7 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
         for (Match match : matches) {
             match.writeTo(out);
         }
+        out.writeOptionalStreamable(facets);
     }
 
     public static class Match implements Streamable {

+ 19 - 0
src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java

@@ -24,6 +24,7 @@ import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRespons
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.percolator.PercolateContext;
+import org.elasticsearch.search.facet.InternalFacets;
 import org.elasticsearch.search.highlight.HighlightField;
 
 import java.io.IOException;
@@ -45,6 +46,8 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
     private byte percolatorTypeId;
     private int requestedSize;
 
+    private InternalFacets facets;
+
     PercolateShardResponse() {
     }
 
@@ -56,6 +59,7 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
         this.scores = scores;
         this.percolatorTypeId = context.percolatorTypeId;
         this.requestedSize = context.size;
+        buildFacets(context);
     }
 
     public PercolateShardResponse(BytesRef[] matches, long count, float[] scores, PercolateContext context, String index, int shardId) {
@@ -65,6 +69,7 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
         this.scores = scores;
         this.percolatorTypeId = context.percolatorTypeId;
         this.requestedSize = context.size;
+        buildFacets(context);
     }
 
     public PercolateShardResponse(BytesRef[] matches, List<Map<String, HighlightField>> hls, long count, PercolateContext context, String index, int shardId) {
@@ -75,6 +80,7 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
         this.count = count;
         this.percolatorTypeId = context.percolatorTypeId;
         this.requestedSize = context.size;
+        buildFacets(context);
     }
 
     public PercolateShardResponse(long count, PercolateContext context, String index, int shardId) {
@@ -84,6 +90,7 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
         this.scores = new float[0];
         this.percolatorTypeId = context.percolatorTypeId;
         this.requestedSize = context.size;
+        buildFacets(context);
     }
 
     public PercolateShardResponse(PercolateContext context, String index, int shardId) {
@@ -113,6 +120,10 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
         return hls;
     }
 
+    public InternalFacets facets() {
+        return facets;
+    }
+
     public byte percolatorTypeId() {
         return percolatorTypeId;
     }
@@ -144,6 +155,7 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
             }
             hls.add(fields);
         }
+        facets = InternalFacets.readOptionalFacets(in);
     }
 
     @Override
@@ -168,5 +180,12 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
                 entry.getValue().writeTo(out);
             }
         }
+        out.writeOptionalStreamable(facets);
+    }
+
+    private void buildFacets(PercolateContext context) {
+        if (context.queryResult() != null && context.queryResult().facets() != null) {
+            this.facets = new InternalFacets(context.queryResult().facets().facets());
+        }
     }
 }

+ 47 - 0
src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java

@@ -19,6 +19,7 @@
 
 package org.elasticsearch.action.percolate;
 
+import com.google.common.collect.Lists;
 import org.elasticsearch.ElasticSearchGenerationException;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -26,13 +27,16 @@ import org.elasticsearch.common.xcontent.*;
 import org.elasticsearch.index.query.FilterBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilderException;
+import org.elasticsearch.search.facet.FacetBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 
 import java.io.IOException;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 /**
+ * Builder to create the percolate request body.
  */
 public class PercolateSourceBuilder implements ToXContent {
 
@@ -43,6 +47,7 @@ public class PercolateSourceBuilder implements ToXContent {
     private Boolean sort;
     private Boolean score;
     private HighlightBuilder highlightBuilder;
+    private List<FacetBuilder> facets;
 
     public DocBuilder percolateDocument() {
         if (docBuilder == null) {
@@ -55,6 +60,9 @@ public class PercolateSourceBuilder implements ToXContent {
         return docBuilder;
     }
 
+    /**
+     * Sets the document to run the percolate queries against.
+     */
     public PercolateSourceBuilder setDoc(DocBuilder docBuilder) {
         this.docBuilder = docBuilder;
         return this;
@@ -64,6 +72,10 @@ public class PercolateSourceBuilder implements ToXContent {
         return queryBuilder;
     }
 
+    /**
+     * Sets a query to reduce the number of percolate queries to be evaluated and score the queries that match based
+     * on this query.
+     */
     public PercolateSourceBuilder setQueryBuilder(QueryBuilder queryBuilder) {
         this.queryBuilder = queryBuilder;
         return this;
@@ -73,31 +85,58 @@ public class PercolateSourceBuilder implements ToXContent {
         return filterBuilder;
     }
 
+    /**
+     * Sets a filter to reduce the number of percolate queries to be evaluated.
+     */
     public PercolateSourceBuilder setFilterBuilder(FilterBuilder filterBuilder) {
         this.filterBuilder = filterBuilder;
         return this;
     }
 
+    /**
+     * Limits the maximum number of percolate query matches to be returned.
+     */
     public PercolateSourceBuilder setSize(int size) {
         this.size = size;
         return this;
     }
 
+    /**
+     * Similar to {@link #setScore(boolean)}, but also sorts the matches by score.
+     */
     public PercolateSourceBuilder setSort(boolean sort) {
         this.sort = sort;
         return this;
     }
 
+    /**
+     * Whether to compute a score for each match and include it in the response. The score is based on
+     * {@link #setQueryBuilder(QueryBuilder)}.
+     */
     public PercolateSourceBuilder setScore(boolean score) {
         this.score = score;
         return this;
     }
 
+    /**
+     * Enables highlighting for the percolate document. The percolate document is highlighted once per matched percolate query.
+     */
     public PercolateSourceBuilder setHighlightBuilder(HighlightBuilder highlightBuilder) {
         this.highlightBuilder = highlightBuilder;
         return this;
     }
 
+    /**
+     * Add a facet definition.
+     */
+    public PercolateSourceBuilder addFacet(FacetBuilder facetBuilder) {
+        if (facets == null) {
+            facets = Lists.newArrayList();
+        }
+        facets.add(facetBuilder);
+        return this;
+    }
+
     public BytesReference buildAsBytes(XContentType contentType) throws SearchSourceBuilderException {
         try {
             XContentBuilder builder = XContentFactory.contentBuilder(contentType);
@@ -134,6 +173,14 @@ public class PercolateSourceBuilder implements ToXContent {
         if (highlightBuilder != null) {
             highlightBuilder.toXContent(builder, params);
         }
+        if (facets != null) {
+            builder.field("facets");
+            builder.startObject();
+            for (FacetBuilder facet : facets) {
+                facet.toXContent(builder, params);
+            }
+            builder.endObject();
+        }
         builder.endObject();
         return builder;
     }

+ 1 - 1
src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java

@@ -162,7 +162,7 @@ public class TransportPercolateAction extends TransportBroadcastOperationAction<
             long tookInMillis = System.currentTimeMillis() - request.startTime;
             return new PercolateResponse(
                     shardsResponses.length(), successfulShards, failedShards, shardFailures,
-                    result.matches(), result.count(), tookInMillis
+                    result.matches(), result.count(), tookInMillis, result.reducedFacets()
             );
         }
     }

+ 12 - 0
src/main/java/org/elasticsearch/common/io/stream/StreamInput.java

@@ -437,4 +437,16 @@ public abstract class StreamInput extends InputStream {
                 throw new IOException("Can't read unknown type [" + type + "]");
         }
     }
+
+    /**
+     * Deserializes a value that may be null, as written by {@code StreamOutput#writeOptionalStreamable}.
+     */
+    public <T extends Streamable> T readOptionalStreamable(T streamable) throws IOException {
+        if (readBoolean()) {
+            streamable.readFrom(this);
+            return streamable;
+        } else {
+            return null;
+        }
+    }
 }

+ 12 - 0
src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java

@@ -412,4 +412,16 @@ public abstract class StreamOutput extends OutputStream {
             throw new IOException("Can't write type [" + type + "]");
         }
     }
+
+    /**
+     * Serializes a value that may be null, readable via {@code StreamInput#readOptionalStreamable}.
+     */
+    public void writeOptionalStreamable(@Nullable Streamable streamable) throws IOException {
+        if (streamable != null) {
+            writeBoolean(true);
+            streamable.writeTo(this);
+        } else {
+            writeBoolean(false);
+        }
+    }
 }

+ 42 - 27
src/main/java/org/elasticsearch/percolator/PercolateContext.java

@@ -91,10 +91,14 @@ public class PercolateContext extends SearchContext {
     private final IndexService indexService;
     private final IndexFieldDataService fieldDataService;
     private final IndexShard indexShard;
+    private final CacheRecycler cacheRecycler;
     private final ConcurrentMap<HashedBytesRef, Query> percolateQueries;
     private String[] types;
 
     private Engine.Searcher docEngineSearcher;
+    private Engine.Searcher engineSearcher;
+    private ContextIndexSearcher searcher;
+
     private SearchContextHighlight highlight;
     private SearchLookup searchLookup;
     private ParsedQuery parsedQuery;
@@ -102,8 +106,10 @@ public class PercolateContext extends SearchContext {
     private boolean queryRewritten;
     private Query percolateQuery;
     private FetchSubPhase.HitContext hitContext;
+    private SearchContextFacets facets;
+    private QuerySearchResult querySearchResult;
 
-    public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard, IndexService indexService) {
+    public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard, IndexService indexService, CacheRecycler cacheRecycler) {
         this.request = request;
         this.indexShard = indexShard;
         this.indexService = indexService;
@@ -111,6 +117,10 @@ public class PercolateContext extends SearchContext {
         this.searchShardTarget = searchShardTarget;
         this.percolateQueries = indexShard.percolateRegistry().percolateQueries();
         this.types = new String[]{request.documentType()};
+        this.cacheRecycler = cacheRecycler;
+        this.querySearchResult = new QuerySearchResult(0, searchShardTarget);
+        this.engineSearcher = indexShard.acquireSearcher("percolate");
+        this.searcher = new ContextIndexSearcher(this, engineSearcher);
     }
 
     public void initialize(final MemoryIndex memoryIndex, ParsedDocument parsedDocument) {
@@ -209,13 +219,17 @@ public class PercolateContext extends SearchContext {
 
     @Override
     public boolean release() throws ElasticSearchException {
-        if (docEngineSearcher != null) {
-            IndexReader indexReader = docEngineSearcher.reader();
-            fieldDataService.clear(indexReader);
-            indexService.cache().clear(indexReader);
-            return docEngineSearcher.release();
-        } else {
-            return false;
+        try {
+            if (docEngineSearcher != null) {
+                IndexReader indexReader = docEngineSearcher.reader();
+                fieldDataService.clear(indexReader);
+                indexService.cache().clear(indexReader);
+                return docEngineSearcher.release();
+            } else {
+                return false;
+            }
+        } finally {
+            engineSearcher.release();
         }
     }
 
@@ -226,9 +240,9 @@ public class PercolateContext extends SearchContext {
 
     @Override
     public SearchContext parsedQuery(ParsedQuery query) {
-        parsedQuery = query;
+        this.parsedQuery = query;
         this.query = query.query();
-        queryRewritten = false;
+        this.queryRewritten = false;
         return this;
     }
 
@@ -269,6 +283,17 @@ public class PercolateContext extends SearchContext {
         return fieldDataService;
     }
 
+    @Override
+    public SearchContextFacets facets() {
+        return facets;
+    }
+
+    @Override
+    public SearchContext facets(SearchContextFacets facets) {
+        this.facets = facets;
+        return this;
+    }
+
     // Unused:
     @Override
     public boolean clearAndRelease() {
@@ -345,16 +370,6 @@ public class PercolateContext extends SearchContext {
         throw new UnsupportedOperationException();
     }
 
-    @Override
-    public SearchContextFacets facets() {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public SearchContext facets(SearchContextFacets facets) {
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public SuggestionSearchContext suggest() {
         throw new UnsupportedOperationException();
@@ -417,7 +432,7 @@ public class PercolateContext extends SearchContext {
 
     @Override
     public ContextIndexSearcher searcher() {
-        throw new UnsupportedOperationException();
+        return searcher;
     }
 
     @Override
@@ -442,7 +457,7 @@ public class PercolateContext extends SearchContext {
 
     @Override
     public CacheRecycler cacheRecycler() {
-        throw new UnsupportedOperationException();
+        return cacheRecycler;
     }
 
     @Override
@@ -462,7 +477,7 @@ public class PercolateContext extends SearchContext {
 
     @Override
     public long timeoutInMillis() {
-        throw new UnsupportedOperationException();
+        return -1;
     }
 
     @Override
@@ -477,7 +492,7 @@ public class PercolateContext extends SearchContext {
 
     @Override
     public Float minimumScore() {
-        throw new UnsupportedOperationException();
+        return null;
     }
 
     @Override
@@ -507,7 +522,7 @@ public class PercolateContext extends SearchContext {
 
     @Override
     public ParsedFilter parsedFilter() {
-        throw new UnsupportedOperationException();
+        return null;
     }
 
     @Override
@@ -627,7 +642,7 @@ public class PercolateContext extends SearchContext {
 
     @Override
     public QuerySearchResult queryResult() {
-        throw new UnsupportedOperationException();
+        return querySearchResult;
     }
 
     @Override
@@ -662,7 +677,7 @@ public class PercolateContext extends SearchContext {
 
     @Override
     public FieldMapper smartNameFieldMapper(String name) {
-        throw new UnsupportedOperationException();
+        return mapperService().smartNameFieldMapper(name, types);
     }
 
     @Override

+ 136 - 51
src/main/java/org/elasticsearch/percolator/PercolatorService.java

@@ -20,6 +20,7 @@ package org.elasticsearch.percolator;
 
 import com.carrotsearch.hppc.ByteObjectOpenHashMap;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexableField;
@@ -35,6 +36,7 @@ import org.elasticsearch.ElasticSearchParseException;
 import org.elasticsearch.action.percolate.PercolateResponse;
 import org.elasticsearch.action.percolate.PercolateShardRequest;
 import org.elasticsearch.action.percolate.PercolateShardResponse;
+import org.elasticsearch.cache.recycler.CacheRecycler;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -43,6 +45,7 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.lucene.search.XCollector;
 import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
 import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -76,6 +79,10 @@ import org.elasticsearch.percolator.QueryCollector.MatchAndScore;
 import org.elasticsearch.percolator.QueryCollector.MatchAndSort;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.SearchShardTarget;
+import org.elasticsearch.search.facet.Facet;
+import org.elasticsearch.search.facet.FacetPhase;
+import org.elasticsearch.search.facet.InternalFacet;
+import org.elasticsearch.search.facet.InternalFacets;
 import org.elasticsearch.search.highlight.HighlightField;
 import org.elasticsearch.search.highlight.HighlightPhase;
 import org.elasticsearch.search.internal.SearchContext;
@@ -97,17 +104,20 @@ public class PercolatorService extends AbstractComponent {
     private final CloseableThreadLocal<MemoryIndex> cache;
     private final IndicesService indicesService;
     private final ByteObjectOpenHashMap<PercolatorType> percolatorTypes;
-
+    private final CacheRecycler cacheRecycler;
     private final ClusterService clusterService;
 
+    private final FacetPhase facetPhase;
     private final HighlightPhase highlightPhase;
 
     @Inject
-    public PercolatorService(Settings settings, IndicesService indicesService, HighlightPhase highlightPhase, ClusterService clusterService) {
+    public PercolatorService(Settings settings, IndicesService indicesService, CacheRecycler cacheRecycler, HighlightPhase highlightPhase, ClusterService clusterService, FacetPhase facetPhase) {
         super(settings);
         this.indicesService = indicesService;
+        this.cacheRecycler = cacheRecycler;
         this.clusterService = clusterService;
         this.highlightPhase = highlightPhase;
+        this.facetPhase = facetPhase;
 
         final long maxReuseBytes = settings.getAsBytesSize("indices.memory.memory_index.size_per_thread", new ByteSizeValue(1, ByteSizeUnit.MB)).bytes();
         cache = new CloseableThreadLocal<MemoryIndex>() {
@@ -116,7 +126,7 @@ public class PercolatorService extends AbstractComponent {
                 return new ExtendedMemoryIndex(true, maxReuseBytes);
             }
         };
-        
+
         percolatorTypes = new ByteObjectOpenHashMap<PercolatorType>(6);
         percolatorTypes.put(countPercolator.id(), countPercolator);
         percolatorTypes.put(queryCountPercolator.id(), queryCountPercolator);
@@ -140,11 +150,11 @@ public class PercolatorService extends AbstractComponent {
         shardPercolateService.prePercolate();
         long startTime = System.nanoTime();
 
+        SearchShardTarget searchShardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
+        final PercolateContext context = new PercolateContext(
+                request, searchShardTarget, indexShard, percolateIndexService, cacheRecycler
+        );
         try {
-            SearchShardTarget searchShardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
-            final PercolateContext context = new PercolateContext(
-                    request, searchShardTarget, indexShard, percolateIndexService
-            );
 
             ParsedDocument parsedDocument = parseRequest(percolateIndexService, request, context);
             if (context.percolateQueries().isEmpty()) {
@@ -157,8 +167,8 @@ public class PercolatorService extends AbstractComponent {
                 throw new ElasticSearchIllegalArgumentException("Nothing to percolate");
             }
 
-            if (context.percolateQuery() == null && (context.score || context.sort)) {
-                throw new ElasticSearchIllegalArgumentException("Can't sort or score if query isn't specified");
+            if (context.percolateQuery() == null && (context.score || context.sort || context.facets() != null)) {
+                context.percolateQuery(new MatchAllDocsQuery());
             }
 
             if (context.sort && !context.limit) {
@@ -175,48 +185,40 @@ public class PercolatorService extends AbstractComponent {
 
             // first, parse the source doc into a MemoryIndex
             final MemoryIndex memoryIndex = cache.get();
-            try {
-                // TODO: This means percolation does not support nested docs...
-                // So look into: ByteBufferDirectory
-                for (IndexableField field : parsedDocument.rootDoc().getFields()) {
-                    if (!field.fieldType().indexed()) {
-                        continue;
-                    }
-                    // no need to index the UID field
-                    if (field.name().equals(UidFieldMapper.NAME)) {
-                        continue;
-                    }
-                    TokenStream tokenStream;
-                    try {
-                        tokenStream = field.tokenStream(parsedDocument.analyzer());
-                        if (tokenStream != null) {
-                            memoryIndex.addField(field.name(), tokenStream, field.boost());
-                        }
-                    } catch (IOException e) {
-                        throw new ElasticSearchException("Failed to create token stream", e);
+            // TODO: This means percolation does not support nested docs...
+            // So look into: ByteBufferDirectory
+            for (IndexableField field : parsedDocument.rootDoc().getFields()) {
+                if (!field.fieldType().indexed() && field.name().equals(UidFieldMapper.NAME)) {
+                    continue;
+                }
+                try {
+                    TokenStream tokenStream = field.tokenStream(parsedDocument.analyzer());
+                    if (tokenStream != null) {
+                        memoryIndex.addField(field.name(), tokenStream, field.boost());
                     }
+                } catch (IOException e) {
+                    throw new ElasticSearchException("Failed to create token stream", e);
                 }
+            }
 
-                PercolatorType action;
-                if (request.onlyCount()) {
-                    action = context.percolateQuery() != null ? queryCountPercolator : countPercolator;
+            PercolatorType action;
+            if (request.onlyCount()) {
+                action = context.percolateQuery() != null ? queryCountPercolator : countPercolator;
+            } else {
+                if (context.sort) {
+                    action = topMatchingPercolator;
+                } else if (context.percolateQuery() != null) {
+                    action = context.score ? scoringPercolator : queryPercolator;
                 } else {
-                    if (context.sort) {
-                        action = topMatchingPercolator;
-                    } else if (context.percolateQuery() != null) {
-                        action = context.score ? scoringPercolator : queryPercolator;
-                    } else {
-                        action = matchPercolator;
-                    }
+                    action = matchPercolator;
                 }
-                context.percolatorTypeId = action.id();
-
-                context.initialize(memoryIndex, parsedDocument);
-                return action.doPercolate(request, context);
-            } finally {
-                context.release();
             }
+            context.percolatorTypeId = action.id();
+
+            context.initialize(memoryIndex, parsedDocument);
+            return action.doPercolate(request, context);
         } finally {
+            context.release();
             shardPercolateService.postPercolate(System.nanoTime() - startTime);
         }
     }
@@ -228,6 +230,7 @@ public class PercolatorService extends AbstractComponent {
         }
 
         Map<String, ? extends SearchParseElement> hlElements = highlightPhase.parseElements();
+        Map<String, ? extends SearchParseElement> facetElements = facetPhase.parseElements();
 
         ParsedDocument doc = null;
         XContentParser parser = null;
@@ -258,6 +261,10 @@ public class PercolatorService extends AbstractComponent {
                     }
                 } else if (token == XContentParser.Token.START_OBJECT) {
                     SearchParseElement element = hlElements.get(currentFieldName);
+                    if (element == null) {
+                        element = facetElements.get(currentFieldName);
+                    }
+
                     if ("query".equals(currentFieldName)) {
                         if (context.percolateQuery() != null) {
                             throw new ElasticSearchParseException("Either specify query or filter, not both");
@@ -381,7 +388,14 @@ public class PercolatorService extends AbstractComponent {
             for (PercolateShardResponse shardResponse : shardResults) {
                 finalCount += shardResponse.count();
             }
-            return new ReduceResult(finalCount);
+
+            assert !shardResults.isEmpty();
+            if (shardResults.get(0).facets() != null) {
+                InternalFacets reducedFacets = reduceFacets(shardResults);
+                return new ReduceResult(finalCount, reducedFacets);
+            } else {
+                return new ReduceResult(finalCount);
+            }
         }
 
         @Override
@@ -454,7 +468,8 @@ public class PercolatorService extends AbstractComponent {
 
             // Use a custom impl of AbstractBigArray for Object[]?
             List<PercolateResponse.Match> finalMatches = new ArrayList<PercolateResponse.Match>(requestedSize == 0 ? numMatches : requestedSize);
-            outer: for (PercolateShardResponse response : shardResults) {
+            outer:
+            for (PercolateShardResponse response : shardResults) {
                 Text index = new StringText(response.getIndex());
                 for (int i = 0; i < response.matches().length; i++) {
                     float score = response.scores().length == 0 ? NO_SCORE : response.scores()[i];
@@ -466,7 +481,14 @@ public class PercolatorService extends AbstractComponent {
                     }
                 }
             }
-            return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]));
+
+            assert !shardResults.isEmpty();
+            if (shardResults.get(0).facets() != null) {
+                InternalFacets reducedFacets = reduceFacets(shardResults);
+                return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]), reducedFacets);
+            } else {
+                return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]));
+            }
         }
 
         @Override
@@ -657,7 +679,14 @@ public class PercolatorService extends AbstractComponent {
                     }
                 }
             }
-            return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]));
+
+            assert !shardResults.isEmpty();
+            if (shardResults.get(0).facets() != null) {
+                InternalFacets reducedFacets = reduceFacets(shardResults);
+                return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]), reducedFacets);
+            } else {
+                return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]));
+            }
         }
 
         @Override
@@ -712,26 +741,52 @@ public class PercolatorService extends AbstractComponent {
 
     };
 
-    private static void queryBasedPercolating(Engine.Searcher percolatorSearcher, PercolateContext context, Collector collector) throws IOException {
+    private void queryBasedPercolating(Engine.Searcher percolatorSearcher, PercolateContext context, QueryCollector percolateCollector) throws IOException {
         Filter percolatorTypeFilter = context.indexService().mapperService().documentMapper(Constants.TYPE_NAME).typeFilter();
         percolatorTypeFilter = context.indexService().cache().filter().cache(percolatorTypeFilter);
         FilteredQuery query = new FilteredQuery(context.percolateQuery(), percolatorTypeFilter);
-        percolatorSearcher.searcher().search(query, collector);
+        percolatorSearcher.searcher().search(query, percolateCollector);
+
+        for (Collector queryCollector : percolateCollector.facetCollectors) {
+            if (queryCollector instanceof XCollector) {
+                ((XCollector) queryCollector).postCollection();
+            }
+        }
+        if (context.facets() != null) {
+            facetPhase.execute(context);
+        }
     }
 
     public final static class ReduceResult {
 
+        private static PercolateResponse.Match[] EMPTY = new PercolateResponse.Match[0];
+
         private final long count;
         private final PercolateResponse.Match[] matches;
+        private final InternalFacets reducedFacets;
+
+        ReduceResult(long count, PercolateResponse.Match[] matches, InternalFacets reducedFacets) {
+            this.count = count;
+            this.matches = matches;
+            this.reducedFacets = reducedFacets;
+        }
 
         ReduceResult(long count, PercolateResponse.Match[] matches) {
             this.count = count;
             this.matches = matches;
+            this.reducedFacets = null;
+        }
+
+        public ReduceResult(long count, InternalFacets reducedFacets) {
+            this.count = count;
+            this.matches = EMPTY;
+            this.reducedFacets = reducedFacets;
         }
 
         public ReduceResult(long count) {
             this.count = count;
-            this.matches = new PercolateResponse.Match[0];
+            this.matches = EMPTY;
+            this.reducedFacets = null;
         }
 
         public long count() {
@@ -741,6 +796,10 @@ public class PercolatorService extends AbstractComponent {
         public PercolateResponse.Match[] matches() {
             return matches;
         }
+
+        public InternalFacets reducedFacets() {
+            return reducedFacets;
+        }
     }
 
     public static final class Constants {
@@ -749,4 +808,30 @@ public class PercolatorService extends AbstractComponent {
 
     }
 
+    private InternalFacets reduceFacets(List<PercolateShardResponse> shardResults) {
+        if (shardResults.size() == 1) {
+            return shardResults.get(0).facets();
+        }
+
+        PercolateShardResponse firstShardResponse = shardResults.get(0);
+        List<Facet> aggregatedFacets = Lists.newArrayList();
+        List<Facet> namedFacets = Lists.newArrayList();
+        for (Facet facet : firstShardResponse.facets()) {
+            // aggregate each facet name into a single list, and aggregate it
+            namedFacets.clear();
+            for (PercolateShardResponse entry : shardResults) {
+                for (Facet facet1 : entry.facets()) {
+                    if (facet.getName().equals(facet1.getName())) {
+                        namedFacets.add(facet1);
+                    }
+                }
+            }
+            if (!namedFacets.isEmpty()) {
+                Facet aggregatedFacet = ((InternalFacet) namedFacets.get(0)).reduce(new InternalFacet.ReduceContext(cacheRecycler, namedFacets));
+                aggregatedFacets.add(aggregatedFacet);
+            }
+        }
+        return new InternalFacets(aggregatedFacets);
+    }
+
 }

+ 43 - 0
src/main/java/org/elasticsearch/percolator/QueryCollector.java

@@ -27,6 +27,7 @@ import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.lucene.search.FilteredCollector;
 import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.fielddata.FieldDataType;
@@ -34,6 +35,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.internal.IdFieldMapper;
 import org.elasticsearch.index.query.ParsedQuery;
+import org.elasticsearch.search.facet.SearchContextFacets;
+import org.elasticsearch.search.facet.nested.NestedFacetExecutor;
 import org.elasticsearch.search.highlight.HighlightField;
 import org.elasticsearch.search.highlight.HighlightPhase;
 
@@ -57,6 +60,9 @@ abstract class QueryCollector extends Collector {
 
     BytesValues values;
 
+    final List<Collector> facetCollectors = new ArrayList<Collector>();
+    final Collector facetCollector;
+
     QueryCollector(ESLogger logger, PercolateContext context) {
         this.logger = logger;
         this.queries = context.percolateQueries();
@@ -66,16 +72,41 @@ abstract class QueryCollector extends Collector {
                 new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes")),
                 false
         );
+
+        if (context.facets() != null) {
+            for (SearchContextFacets.Entry entry : context.facets().entries()) {
+                if (entry.isGlobal()) {
+                    continue; // not supported for now
+                }
+                Collector collector = entry.getFacetExecutor().collector();
+                if (entry.getFilter() != null) {
+                    if (collector instanceof NestedFacetExecutor.Collector) {
+                        collector = new NestedFacetExecutor.Collector((NestedFacetExecutor.Collector) collector, entry.getFilter());
+                    } else {
+                        collector = new FilteredCollector(collector, entry.getFilter());
+                    }
+                }
+                facetCollectors.add(collector);
+            }
+        }
+
+        facetCollector = facetCollectors.isEmpty() ? null : MultiCollector.wrap(facetCollectors.toArray(new Collector[facetCollectors.size()]));
     }
 
     @Override
     public void setScorer(Scorer scorer) throws IOException {
+        if (facetCollector != null) {
+            facetCollector.setScorer(scorer);
+        }
     }
 
     @Override
     public void setNextReader(AtomicReaderContext context) throws IOException {
         // we use the UID because id might not be indexed
         values = idFieldData.load(context).getBytesValues();
+        if (facetCollector != null) {
+            facetCollector.setNextReader(context);
+        }
     }
 
     @Override
@@ -145,6 +176,9 @@ abstract class QueryCollector extends Collector {
                         }
                     }
                     counter++;
+                    if (facetCollector != null) {
+                        facetCollector.collect(doc);
+                    }
                 }
             } catch (IOException e) {
                 logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
@@ -188,6 +222,9 @@ abstract class QueryCollector extends Collector {
                 searcher.search(query, collector);
                 if (collector.exists()) {
                     topDocsCollector.collect(doc);
+                    if (facetCollector != null) {
+                        facetCollector.collect(doc);
+                    }
                 }
             } catch (IOException e) {
                 logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
@@ -260,6 +297,9 @@ abstract class QueryCollector extends Collector {
                         }
                     }
                     counter++;
+                    if (facetCollector != null) {
+                        facetCollector.collect(doc);
+                    }
                 }
             } catch (IOException e) {
                 logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
@@ -310,6 +350,9 @@ abstract class QueryCollector extends Collector {
                 searcher.search(query, collector);
                 if (collector.exists()) {
                     counter++;
+                    if (facetCollector != null) {
+                        facetCollector.collect(doc);
+                    }
                 }
             } catch (IOException e) {
                 logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);

+ 4 - 0
src/main/java/org/elasticsearch/search/facet/InternalFacets.java

@@ -131,6 +131,10 @@ public class InternalFacets implements Facets, Streamable, ToXContent, Iterable<
         return result;
     }
 
+    public static InternalFacets readOptionalFacets(StreamInput in) throws IOException {
+        return in.readOptionalStreamable(new InternalFacets());
+    }
+
     @Override
     public void readFrom(StreamInput in) throws IOException {
         int size = in.readVInt();

+ 101 - 0
src/test/java/org/elasticsearch/percolator/PercolatorFacetsTests.java

@@ -0,0 +1,101 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.percolator;
+
+import org.elasticsearch.action.percolate.PercolateRequestBuilder;
+import org.elasticsearch.action.percolate.PercolateResponse;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.search.facet.FacetBuilders;
+import org.elasticsearch.search.facet.terms.TermsFacet;
+import org.elasticsearch.test.AbstractIntegrationTest;
+import org.junit.Test;
+
+import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder;
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
+import static org.hamcrest.Matchers.arrayWithSize;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+public class PercolatorFacetsTests extends AbstractIntegrationTest {
+
+    @Test
+    public void testFacets() throws Exception {
+        client().admin().indices().prepareCreate("test").execute().actionGet();
+        ensureGreen();
+
+        int numQueries = atLeast(250);
+        int numUniqueQueries = randomInt(numQueries / 2);
+        String[] values = new String[numUniqueQueries];
+        for (int i = 0; i < values.length; i++) {
+            values[i] = "value" + i;
+        }
+        int[] expectedCount = new int[numUniqueQueries];
+
+        logger.info("--> registering {} queries", numQueries);
+        for (int i = 0; i < numQueries; i++) {
+            String value = values[i % numUniqueQueries];
+            expectedCount[i % numUniqueQueries]++;
+            QueryBuilder queryBuilder = matchQuery("field1", value);
+            client().prepareIndex("test", "_percolator", Integer.toString(i))
+                    .setSource(jsonBuilder().startObject().field("query", queryBuilder).field("field2", "b").endObject())
+                    .execute().actionGet();
+        }
+        client().admin().indices().prepareRefresh("test").execute().actionGet();
+
+        for (int i = 0; i < numQueries; i++) {
+            String value = values[i % numUniqueQueries];
+            PercolateRequestBuilder percolateRequestBuilder = client().preparePercolate()
+                    .setIndices("test").setDocumentType("type")
+                    .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject()))
+                    .addFacet(FacetBuilders.termsFacet("a").field("field2"));
+
+            if (randomBoolean()) {
+                percolateRequestBuilder.setPercolateQuery(matchAllQuery());
+            }
+            if (randomBoolean()) {
+                percolateRequestBuilder.setScore(true);
+            } else {
+                percolateRequestBuilder.setSort(true).setSize(numQueries);
+            }
+
+            boolean countOnly = randomBoolean();
+            if (countOnly) {
+                percolateRequestBuilder.setOnlyCount(countOnly);
+            }
+
+            PercolateResponse response = percolateRequestBuilder.execute().actionGet();
+            assertThat(response.getCount(), equalTo((long) expectedCount[i % numUniqueQueries]));
+            if (!countOnly) {
+                assertThat(response.getMatches(), arrayWithSize(expectedCount[i % numUniqueQueries]));
+            }
+
+            assertThat(response.getFacets().facets().size(), equalTo(1));
+            assertThat(response.getFacets().facets().get(0).getName(), equalTo("a"));
+            assertThat(((TermsFacet)response.getFacets().facets().get(0)).getEntries().size(), equalTo(1));
+            assertThat(((TermsFacet)response.getFacets().facets().get(0)).getEntries().get(0).getCount(), equalTo(expectedCount[i % values.length]));
+            assertThat(((TermsFacet)response.getFacets().facets().get(0)).getEntries().get(0).getTerm().string(), equalTo("b"));
+        }
+    }
+
+}