浏览代码

Facets: Removal from master.

Close #7337
Adrien Grand 11 年之前
父节点
当前提交
ea96359d82
共有 100 个文件被更改，包括 83 次插入和 7354 次删除
  1. 1 1
      docs/reference/getting-started.asciidoc
  2. 2 2
      docs/reference/index-modules/fielddata.asciidoc
  3. 2 2
      docs/reference/mapping.asciidoc
  4. 1 1
      docs/reference/mapping/fields/type-field.asciidoc
  5. 1 1
      docs/reference/mapping/types/core-types.asciidoc
  6. 2 2
      docs/reference/migration/migrate_1_0.asciidoc
  7. 1 1
      docs/reference/modules/scripting.asciidoc
  8. 2 2
      docs/reference/query-dsl.asciidoc
  9. 1 1
      docs/reference/query-dsl/filters/range-filter.asciidoc
  10. 1 1
      docs/reference/query-dsl/filters/terms-filter.asciidoc
  11. 1 1
      docs/reference/query-dsl/queries/top-children-query.asciidoc
  12. 2 12
      docs/reference/search/aggregations.asciidoc
  13. 8 282
      docs/reference/search/facets.asciidoc
  14. 0 140
      docs/reference/search/facets/date-histogram-facet.asciidoc
  15. 0 3
      docs/reference/search/facets/deprecated.asciidoc
  16. 0 30
      docs/reference/search/facets/filter-facet.asciidoc
  17. 0 258
      docs/reference/search/facets/geo-distance-facet.asciidoc
  18. 0 144
      docs/reference/search/facets/histogram-facet.asciidoc
  19. 0 25
      docs/reference/search/facets/query-facet.asciidoc
  20. 0 125
      docs/reference/search/facets/range-facet.asciidoc
  21. 0 107
      docs/reference/search/facets/statistical-facet.asciidoc
  22. 0 294
      docs/reference/search/facets/terms-facet.asciidoc
  23. 0 59
      docs/reference/search/facets/terms-stats-facet.asciidoc
  24. 1 1
      docs/reference/search/multi-search.asciidoc
  25. 0 2
      docs/reference/search/percolate.asciidoc
  26. 1 1
      docs/reference/search/request/search-type.asciidoc
  27. 0 9
      src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java
  28. 1 17
      src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java
  29. 0 11
      src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java
  30. 0 21
      src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java
  31. 1 1
      src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java
  32. 1 1
      src/main/java/org/elasticsearch/action/search/SearchRequest.java
  33. 8 69
      src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java
  34. 4 9
      src/main/java/org/elasticsearch/action/search/SearchResponse.java
  35. 1 1
      src/main/java/org/elasticsearch/action/search/SearchType.java
  36. 1 1
      src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollScanAction.java
  37. 0 2
      src/main/java/org/elasticsearch/index/query/HasChildFilterParser.java
  38. 0 3
      src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java
  39. 0 2
      src/main/java/org/elasticsearch/index/query/HasParentFilterParser.java
  40. 0 2
      src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java
  41. 2 13
      src/main/java/org/elasticsearch/index/query/NestedFilterParser.java
  42. 0 2
      src/main/java/org/elasticsearch/index/query/NestedQueryParser.java
  43. 0 2
      src/main/java/org/elasticsearch/index/query/TopChildrenQueryParser.java
  44. 0 13
      src/main/java/org/elasticsearch/percolator/PercolateContext.java
  45. 9 63
      src/main/java/org/elasticsearch/percolator/PercolatorService.java
  46. 7 26
      src/main/java/org/elasticsearch/percolator/QueryCollector.java
  47. 2 8
      src/main/java/org/elasticsearch/script/AbstractSearchScript.java
  48. 1 2
      src/main/java/org/elasticsearch/search/SearchModule.java
  49. 1 2
      src/main/java/org/elasticsearch/search/TransportSearchModule.java
  50. 2 2
      src/main/java/org/elasticsearch/search/aggregations/AggregationBinaryParseElement.java
  51. 5 5
      src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java
  52. 1 1
      src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java
  53. 1 1
      src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java
  54. 0 11
      src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsContext.java
  55. 7 81
      src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java
  56. 1 33
      src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java
  57. 0 51
      src/main/java/org/elasticsearch/search/facet/DoubleFacetAggregatorBase.java
  58. 0 36
      src/main/java/org/elasticsearch/search/facet/Facet.java
  59. 0 44
      src/main/java/org/elasticsearch/search/facet/FacetBinaryParseElement.java
  60. 0 102
      src/main/java/org/elasticsearch/search/facet/FacetBuilder.java
  61. 0 98
      src/main/java/org/elasticsearch/search/facet/FacetBuilders.java
  62. 0 160
      src/main/java/org/elasticsearch/search/facet/FacetExecutor.java
  63. 0 69
      src/main/java/org/elasticsearch/search/facet/FacetModule.java
  64. 0 142
      src/main/java/org/elasticsearch/search/facet/FacetParseElement.java
  65. 0 52
      src/main/java/org/elasticsearch/search/facet/FacetParser.java
  66. 0 48
      src/main/java/org/elasticsearch/search/facet/FacetParsers.java
  67. 0 204
      src/main/java/org/elasticsearch/search/facet/FacetPhase.java
  68. 0 36
      src/main/java/org/elasticsearch/search/facet/FacetPhaseExecutionException.java
  69. 0 56
      src/main/java/org/elasticsearch/search/facet/Facets.java
  70. 0 109
      src/main/java/org/elasticsearch/search/facet/InternalFacet.java
  71. 0 164
      src/main/java/org/elasticsearch/search/facet/InternalFacets.java
  72. 0 52
      src/main/java/org/elasticsearch/search/facet/LongFacetAggregatorBase.java
  73. 0 102
      src/main/java/org/elasticsearch/search/facet/SearchContextFacets.java
  74. 0 49
      src/main/java/org/elasticsearch/search/facet/TransportFacetModule.java
  75. 0 120
      src/main/java/org/elasticsearch/search/facet/datehistogram/CountDateHistogramFacetExecutor.java
  76. 0 180
      src/main/java/org/elasticsearch/search/facet/datehistogram/DateHistogramFacet.java
  77. 0 264
      src/main/java/org/elasticsearch/search/facet/datehistogram/DateHistogramFacetBuilder.java
  78. 0 222
      src/main/java/org/elasticsearch/search/facet/datehistogram/DateHistogramFacetParser.java
  79. 0 217
      src/main/java/org/elasticsearch/search/facet/datehistogram/InternalCountDateHistogramFacet.java
  80. 0 45
      src/main/java/org/elasticsearch/search/facet/datehistogram/InternalDateHistogramFacet.java
  81. 0 265
      src/main/java/org/elasticsearch/search/facet/datehistogram/InternalFullDateHistogramFacet.java
  82. 0 150
      src/main/java/org/elasticsearch/search/facet/datehistogram/ValueDateHistogramFacetExecutor.java
  83. 0 147
      src/main/java/org/elasticsearch/search/facet/datehistogram/ValueScriptDateHistogramFacetExecutor.java
  84. 0 38
      src/main/java/org/elasticsearch/search/facet/filter/FilterFacet.java
  85. 0 82
      src/main/java/org/elasticsearch/search/facet/filter/FilterFacetBuilder.java
  86. 0 106
      src/main/java/org/elasticsearch/search/facet/filter/FilterFacetExecutor.java
  87. 0 64
      src/main/java/org/elasticsearch/search/facet/filter/FilterFacetParser.java
  88. 0 124
      src/main/java/org/elasticsearch/search/facet/filter/InternalFilterFacet.java
  89. 0 107
      src/main/java/org/elasticsearch/search/facet/geodistance/GeoDistanceFacet.java
  90. 0 290
      src/main/java/org/elasticsearch/search/facet/geodistance/GeoDistanceFacetBuilder.java
  91. 0 138
      src/main/java/org/elasticsearch/search/facet/geodistance/GeoDistanceFacetExecutor.java
  92. 0 187
      src/main/java/org/elasticsearch/search/facet/geodistance/GeoDistanceFacetParser.java
  93. 0 180
      src/main/java/org/elasticsearch/search/facet/geodistance/InternalGeoDistanceFacet.java
  94. 0 105
      src/main/java/org/elasticsearch/search/facet/geodistance/ScriptGeoDistanceFacetExecutor.java
  95. 0 92
      src/main/java/org/elasticsearch/search/facet/geodistance/ValueGeoDistanceFacetExecutor.java
  96. 0 122
      src/main/java/org/elasticsearch/search/facet/histogram/CountHistogramFacetExecutor.java
  97. 0 133
      src/main/java/org/elasticsearch/search/facet/histogram/FullHistogramFacetExecutor.java
  98. 0 180
      src/main/java/org/elasticsearch/search/facet/histogram/HistogramFacet.java
  99. 0 151
      src/main/java/org/elasticsearch/search/facet/histogram/HistogramFacetBuilder.java
  100. 0 159
      src/main/java/org/elasticsearch/search/facet/histogram/HistogramFacetParser.java

+ 1 - 1
docs/reference/getting-started.asciidoc

@@ -170,7 +170,7 @@ Now that we have our node (and cluster) up and running, the next step is to unde
 * Check your cluster, node, and index health, status, and statistics
 * Administer your cluster, node, and index data and metadata
 * Perform CRUD (Create, Read, Update, and Delete) and search operations against your indexes
-* Execute advanced search operations such as paging, sorting, filtering, scripting, faceting, aggregations, and many others
+* Execute advanced search operations such as paging, sorting, filtering, scripting, aggregations, and many others
 
 === Cluster Health
 

+ 2 - 2
docs/reference/index-modules/fielddata.asciidoc

@@ -1,8 +1,8 @@
 [[index-modules-fielddata]]
 == Field data
 
-The field data cache is used mainly when sorting on or faceting on a
-field. It loads all the field values to memory in order to provide fast
+The field data cache is used mainly when sorting on or computing aggregations
+on a field. It loads all the field values to memory in order to provide fast
 document based access to those values. The field data cache can be
 expensive to build for a field, so its recommended to have enough memory
 to allocate it, and to keep it loaded.

+ 2 - 2
docs/reference/mapping.asciidoc

@@ -29,8 +29,8 @@ Field names with the same name across types are highly recommended to
 have the same type and same mapping characteristics (analysis settings
 for example). There is an effort to allow to explicitly "choose" which
 field to use by using type prefix (`my_type.my_field`), but it's not
-complete, and there are places where it will never work (like faceting
-on the field).
+complete, and there are places where it will never work (like
+aggregations on the field).
 
 In practice though, this restriction is almost never an issue. The field
 name usually ends up being a good indication to its "typeness" (e.g.

+ 1 - 1
docs/reference/mapping/fields/type-field.asciidoc

@@ -18,7 +18,7 @@ The `_type` field can be stored as well, for example:
 --------------------------------------------------
 
 The `_type` field can also not be indexed, and all the APIs will still
-work except for specific queries (term queries / filters) or faceting
+work except for specific queries (term queries / filters) or aggregations
 done on the `_type` field.
 
 [source,js]

+ 1 - 1
docs/reference/mapping/types/core-types.asciidoc

@@ -477,7 +477,7 @@ default and can't be disabled).
 ==== Fielddata filters
 
 It is possible to control which field values are loaded into memory,
-which is particularly useful for faceting on string fields, using
+which is particularly useful for aggregations on string fields, using
 fielddata filters, which are explained in detail in the
 <<index-modules-fielddata,Fielddata>> section.
 

+ 2 - 2
docs/reference/migration/migrate_1_0.asciidoc

@@ -193,7 +193,7 @@ Also, the top-level `filter` parameter in search has been renamed to
 <<search-request-post-filter,`post_filter`>>, to indicate that it should not
 be used as the primary way to filter search results (use a
 <<query-dsl-filtered-query,`filtered` query>> instead), but only to filter
-results AFTER facets/aggregations have been calculated.
+results AFTER aggregations have been calculated.
 
 This example counts the top colors in all matching docs, but only returns docs
 with color `red`:
@@ -373,4 +373,4 @@ Elasticsearch will *not* delete the `_percolator` index when upgrading, only the
 stored in the `_percolator` index. In order to use the already stored queries, you can just re-index the queries from the
 `_percolator` index into any index under the reserved `.percolator` type. The format in which the percolate queries
 were stored has *not* been changed. So a simple script that does a scan search to retrieve all the percolator queries
-and then does a bulk request into another index should be sufficient.
+and then does a bulk request into another index should be sufficient.

+ 1 - 1
docs/reference/modules/scripting.asciidoc

@@ -304,7 +304,7 @@ There are a few limitations relative to other script languages:
 [float]
 === Score
 
-In all scripts that can be used in facets, the current
+In all scripts that can be used in aggregations, the current
 document's score is accessible in `doc.score`.  When using a `script_score`,
 the current score is available in `_score`.
 

+ 2 - 2
docs/reference/query-dsl.asciidoc

@@ -27,8 +27,8 @@ complex (and interesting) queries.
 
 Both queries and filters can be used in different APIs. For example,
 within a <<search-request-query,search query>>, or
-as a <<search-facets,facet filter>>. This
-section explains the components (queries and filters) that can form the
+as an <<search-aggregations-bucket-filter-aggregation,aggregation filter>>.
+This section explains the components (queries and filters) that can form the
 AST one can use.
 
 Filters are very handy since they perform an order of magnitude better

+ 1 - 1
docs/reference/query-dsl/filters/range-filter.asciidoc

@@ -71,7 +71,7 @@ In general for small ranges the `index` execution is faster and for longer range
 
 The `fielddata` execution as the same suggests uses field data and therefor requires more memory, so make you have
 sufficient memory on your nodes in order to use this execution mode. It usually makes sense to use it on fields  you're
-already faceting or sorting by.
+already aggregating or sorting by.
 
 [float]
 ==== Caching

+ 1 - 1
docs/reference/query-dsl/filters/terms-filter.asciidoc

@@ -39,7 +39,7 @@ The `execution` option now has the following options :
     Generates a terms filters that uses the fielddata cache to
     compare terms.  This execution mode is great to use when filtering
     on a field that is already loaded into the fielddata cache from 
-    faceting, sorting, or index warmers.  When filtering on
+    aggregating, sorting, or index warmers.  When filtering on
     a large number of terms, this execution can be considerably faster
     than the other modes.  The total filter is not cached unless
     explicitly configured to do so.

+ 1 - 1
docs/reference/query-dsl/queries/top-children-query.asciidoc

@@ -45,7 +45,7 @@ including the default values:
 [float]
 ==== Scope
 
-A `_scope` can be defined on the query allowing to run facets on the
+A `_scope` can be defined on the query allowing to run aggregations on the
 same scope name that will work against the child documents. For example:
 
 [source,js]

+ 2 - 12
docs/reference/search/aggregations.asciidoc

@@ -1,18 +1,8 @@
 [[search-aggregations]]
 == Aggregations
 
-Aggregations grew out of the <<search-facets, facets>> module and the long experience of how users use it
-(and would like to use it) for real-time data analytics purposes. As such, it serves as the next generation
-replacement for the functionality we currently refer to as "faceting".
-
-<<search-facets, Facets>> provide a great way to aggregate data within a document set context.
-This context is defined by the executed query in combination with the different levels of filters that can be defined
-(filtered queries, top-level filters, and facet level filters). While powerful, their implementation is not designed
-from the ground up to support complex aggregations and is thus limited.
-
-The aggregations module breaks the barriers the current facet implementation put in place. The new name ("Aggregations")
-also indicates the intention here - a generic yet extremely powerful framework for building aggregations - any types of
-aggregations.
+The aggregations framework helps provide aggregated data based on a search query. It is based on simple building blocks
+called aggregations, that can be composed in order to build complex summaries of the data.
 
 An aggregation can be seen as a _unit-of-work_ that builds analytic information over a set of documents. The context of
 the execution defines what this document set is (e.g. a top-level aggregation executes within the context of the executed

+ 8 - 282
docs/reference/search/facets.asciidoc

@@ -1,285 +1,11 @@
 [[search-facets]]
 == Facets
 
-include::facets/deprecated.asciidoc[]
-
-The usual purpose of a full-text search engine is to return a small
-number of documents matching your query.
-
-_Facets_ provide aggregated data based on a search query. In the
-simplest case, a
-<<search-facets-terms-facet,terms facet>>
-can return _facet counts_ for various _facet values_ for a specific
-_field_. Elasticsearch supports more facet implementations, such as
-<<search-facets-statistical-facet,statistical>>
-or
-<<search-facets-date-histogram-facet,date
-histogram>> facets.
-
-The field used for facet calculations _must_ be of type numeric,
-date/time or be analyzed as a single token — see the
-<<mapping,_Mapping_>> guide for details on the
-analysis process.
-
-You can give the facet a custom _name_ and return multiple facets in one
-request.
-
-Let's try it out with a simple example. Suppose we have a number of
-articles with a field called `tags`, preferably analyzed with the
-<<analysis-keyword-analyzer,keyword>>
-analyzer. The facet aggregation will return counts for the most popular
-tags across the documents matching your query — or across all documents
-in the index.
-
-We will store some example data first:
-
-[source,js]
---------------------------------------------------
-curl -X DELETE "http://localhost:9200/articles"
-curl -X POST "http://localhost:9200/articles/article" -d '{"title" : "One",   "tags" : ["foo"]}'
-curl -X POST "http://localhost:9200/articles/article" -d '{"title" : "Two",   "tags" : ["foo", "bar"]}'
-curl -X POST "http://localhost:9200/articles/article" -d '{"title" : "Three", "tags" : ["foo", "bar", "baz"]}'
---------------------------------------------------
-
-Now, let's query the index for articles beginning with letter `T`
-and retrieve a
-<<search-facets-terms-facet,_terms facet_>>
-for the `tags` field. We will name the facet simply: _tags_.
-
-[source,js]
---------------------------------------------------
-curl -X POST "http://localhost:9200/articles/_search?pretty=true" -d '
-  {
-    "query" : { "query_string" : {"query" : "T*"} },
-    "facets" : {
-      "tags" : { "terms" : {"field" : "tags"} }
-    }
-  }
-'
---------------------------------------------------
-
-This request will return articles `Two` and `Three` (because
-they match our query), as well as the `tags` facet:
-
-[source,js]
---------------------------------------------------
-"facets" : {
-  "tags" : {
-    "_type" : "terms",
-    "missing" : 0,
-    "total": 5,
-    "other": 0,
-    "terms" : [ {
-      "term" : "foo",
-      "count" : 2
-    }, {
-      "term" : "bar",
-      "count" : 2
-    }, {
-      "term" : "baz",
-      "count" : 1
-    } ]
-  }
-}
---------------------------------------------------
-
-In the `terms` array, relevant _terms_ and _counts_ are returned. You'll
-probably want to display these to your users. The facet returns several
-important counts:
-
-* `missing` : The number of documents which have no value for the
-faceted field +
- * `total` : The total number of terms in the facet +
- * `other` : The number of terms not included in the returned facet
-(effectively `other` = `total` - `terms` )
-
-Notice, that the counts are scoped to the current query: _foo_ is
-counted only twice (not three times), _bar_ is counted twice and _baz_
-once. Also note that terms are counted once per document, even if the
-occur more frequently in that document.
-
-That's because the primary purpose of facets is to enable
-http://en.wikipedia.org/wiki/Faceted_search[_faceted navigation_],
-allowing the user to refine her query based on the insight from the
-facet, i.e. restrict the search to a specific category, price or date
-range. Facets can be used, however, for other purposes: computing
-histograms, statistical aggregations, and more. See the blog about
-link:/blog/data-visualization-with-elasticsearch-and-protovis/[data visualization].for inspiration.
-
-
-
-[float]
-=== Scope
-
-As we have already mentioned, facet computation is restricted to the
-scope of the current query, called `main`, by default. Facets can be
-computed within the `global` scope as well, in which case it will return
-values computed across all documents in the index:
-
-[source,js]
---------------------------------------------------
-{
-    "facets" : {
-        "my_facets" : {
-            "terms" : { ... },
-            "global" : true <1>
-        }
-    }
-}
---------------------------------------------------
-<1> The `global` keyword can be used with any facet type.
-
-There's one *important distinction* to keep in mind. While search
-_queries_ restrict both the returned documents and facet counts, search
-_filters_ restrict only returned documents — but _not_ facet counts.
-
-If you need to restrict both the documents and facets, and you're not
-willing or able to use a query, you may use a _facet filter_.
-
-[float]
-=== Facet Filter
-
-All facets can be configured with an additional filter (explained in the
-<<query-dsl,Query DSL>> section), which _will_ reduce
-the documents they use for computing results. An example with a _term_
-filter:
-
-[source,js]
---------------------------------------------------
-{
-    "facets" : {
-        "<FACET NAME>" : {
-            "<FACET TYPE>" : {
-                ...
-            },
-            "facet_filter" : {
-                "term" : { "user" : "kimchy"}
-            }
-        }
-    }
-}
---------------------------------------------------
-
-Note that this is different from a facet of the
-<<search-facets-filter-facet,filter>> type.
-
-[float]
-=== Facets with the _nested_ types
-
-<<mapping-nested-type,Nested>> mapping allows
-for better support for "inner" documents faceting, especially when it
-comes to multi valued key and value facets (like histograms, or term
-stats).
-
-What is it good for? First of all, this is the only way to use facets on
-nested documents once they are used (possibly for other reasons). But,
-there is also facet specific reason why nested documents can be used,
-and that's the fact that facets working on different key and value field
-(like term_stats, or histogram) can now support cases where both are
-multi valued properly.
-
-For example, let's use the following mapping:
-
-[source,js]
---------------------------------------------------
-{
-    "type1" : {
-        "properties" : {
-            "obj1" : {
-                "type" : "nested"
-            }
-        }
-    }
-}
---------------------------------------------------
-
-And, here is a sample data:
-
-[source,js]
---------------------------------------------------
-{
-    "obj1" : [
-        {
-            "name" : "blue",
-            "count" : 4
-        },
-        {
-            "name" : "green",
-            "count" : 6
-        }
-    ]
-}
---------------------------------------------------
-
-
-[float]
-==== All Nested Matching Root Documents
-
-Another option is to run the facet on all the nested documents matching
-the root objects that the main query will end up producing. For example:
-
-[source,js]
---------------------------------------------------
-{
-    "query": {
-        "match_all": {}
-    },
-    "facets": {
-        "facet1": {
-            "terms_stats": {
-                "key_field" : "name",
-                "value_field": "count"
-            },
-            "nested": "obj1"
-        }
-    }
-}
---------------------------------------------------
-
-The `nested` element provides the path to the nested document (can be a
-multi level nested docs) that will be used.
-
-Facet filter allows you to filter your facet on the nested object level.
-It is important that these filters match on the nested object level and
-not on the root document level. In the following example the
-`terms_stats` only applies on nested objects with the name 'blue'.
-
-[source,js]
---------------------------------------------------
-{
-    "query": {
-        "match_all": {}
-    },
-    "facets": {
-        "facet1": {
-            "terms_stats": {
-                "key_field" : "name",
-                "value_field": "count"
-            },
-            "nested": "obj1",
-            "facet_filter" : {
-                "term" : {"name" : "blue"}
-            }
-        }
-    }
-}
---------------------------------------------------
-
-include::facets/terms-facet.asciidoc[]
-
-include::facets/range-facet.asciidoc[]
-
-include::facets/histogram-facet.asciidoc[]
-
-include::facets/date-histogram-facet.asciidoc[]
-
-include::facets/filter-facet.asciidoc[]
-
-include::facets/query-facet.asciidoc[]
-
-include::facets/statistical-facet.asciidoc[]
-
-include::facets/terms-stats-facet.asciidoc[]
-
-include::facets/geo-distance-facet.asciidoc[]
-
+Faceted search refers to a way to explore large amounts of data by displaying
+summaries about various partitions of the data and later allowing to narrow
+the navigation to a specific partition.
+
+In Elasticsearch, `facets` are also the name of a feature that allowed to
+compute these summaries. `facets` have been replaced by
+<<search-aggregations, aggregations>> in Elasticsearch 1.0, which are a superset
+of facets.

+ 0 - 140
docs/reference/search/facets/date-histogram-facet.asciidoc

@@ -1,140 +0,0 @@
-[[search-facets-date-histogram-facet]]
-=== Date Histogram Facet
-
-include::deprecated.asciidoc[]
-
-NOTE: The equivalent aggregation would be the
-<<search-aggregations-bucket-datehistogram-aggregation,`date_histogram`>>
-aggregation.
-
-A specific histogram facet that can work with `date` field types
-enhancing it over the regular
-<<search-facets-histogram-facet,histogram
-facet>>. Here is a quick example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "histo1" : {
-            "date_histogram" : {
-                "field" : "field_name",
-                "interval" : "day"
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Interval
-
-The `interval` allows to set the interval at which buckets will be
-created for each hit. It allows for the constant values of `year`,
-`quarter`, `month`, `week`, `day`, `hour`, `minute` ,`second`.
-
-It also support time setting like `1.5h` (up to `w` for weeks).
-
-==== Time Zone
-
-By default, times are stored as UTC milliseconds since the epoch. Thus,
-all computation and "bucketing" / "rounding" is done on UTC. It is
-possible to provide a time zone (both pre rounding, and post rounding)
-value, which will cause all computations to take the relevant zone into
-account. The time returned for each bucket/entry is milliseconds since
-the epoch of the provided time zone.
-
-The parameters are `pre_zone` (pre rounding based on interval) and
-`post_zone` (post rounding based on interval). The `time_zone` parameter
-simply sets the `pre_zone` parameter. By default, those are set to
-`UTC`.
-
-The zone value accepts either a numeric value for the hours offset, for
-example: `"time_zone" : -2`. It also accepts a format of hours and
-minutes, like `"time_zone" : "-02:30"`. Another option is to provide a
-time zone accepted as one of the values listed
-http://joda-time.sourceforge.net/timezones.html[here].
-
-Lets take an example. For `2012-04-01T04:15:30Z`, with a `pre_zone` of
-`-08:00`. For `day` interval, the actual time by applying the time zone
-and rounding falls under `2012-03-31`, so the returned value will be (in
-millis) of `2012-03-31T00:00:00Z` (UTC). For `hour` interval, applying
-the time zone results in `2012-03-31T20:15:30`, rounding it results in
-`2012-03-31T20:00:00`, but, we want to return it in UTC (`post_zone` is
-not set), so we convert it back to UTC: `2012-04-01T04:00:00Z`. Note, we
-are consistent in the results, returning the rounded value in UTC.
-
-`post_zone` simply takes the result, and adds the relevant offset.
-
-Sometimes, we want to apply the same conversion to UTC we did above for
-`hour` also for `day` (and up) intervals. We can set
-`pre_zone_adjust_large_interval` to `true`, which will apply the same
-conversion done for `hour` interval in the example, to `day` and above
-intervals (it can be set regardless of the interval, but only kick in
-when using `day` and higher intervals).
-
-==== Factor
-
-The date histogram works on numeric values (since time is stored in
-milliseconds since the epoch in UTC). But, sometimes, systems will store
-a different resolution (like seconds since UTC) in a numeric field. The
-`factor` parameter can be used to change the value in the field to
-milliseconds to actual do the relevant rounding, and then be applied
-again to get to the original unit. For example, when storing in a
-numeric field seconds resolution, the `factor` can be set to `1000`.
-
-==== Pre / Post Offset
-
-Specific offsets can be provided for pre rounding and post rounding. The
-`pre_offset` for pre rounding, and `post_offset` for post rounding. The
-format is the date time format (`1h`, `1d`, ...).
-
-==== Value Field
-
-The date_histogram facet allows to use a different key (of type date)
-which controls the bucketing, with a different value field which will
-then return the total and mean for that field values of the hits within
-the relevant bucket. For example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "histo1" : {
-            "date_histogram" : {
-                "key_field" : "timestamp",
-                "value_field" : "price",
-                "interval" : "day"
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Script Value Field
-
-A script can be used to compute the value that will then be used to
-compute the total and mean for a bucket. For example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "histo1" : {
-            "date_histogram" : {
-                "key_field" : "timestamp",
-                "value_script" : "doc['price'].value * 2",
-                "interval" : "day"
-            }
-        }
-    }
-}
---------------------------------------------------

+ 0 - 3
docs/reference/search/facets/deprecated.asciidoc

@@ -1,3 +0,0 @@
-[WARNING]
-Facets are deprecated and will be removed in a future release. You are
-encouraged to migrate to <<search-aggregations, aggregations>> instead.

+ 0 - 30
docs/reference/search/facets/filter-facet.asciidoc

@@ -1,30 +0,0 @@
-[[search-facets-filter-facet]]
-=== Filter Facets
-
-include::deprecated.asciidoc[]
-
-NOTE: The equivalent aggregation would be the
-<<search-aggregations-bucket-filter-aggregation,`filter`>>
-aggregation.
-
-A filter facet (not to be confused with a
-<<search-facets,facet filter>>) allows you to
-return a count of the hits matching the filter. The filter itself can be
-expressed using the <<query-dsl,Query DSL>>. For
-example:
-
-[source,js]
---------------------------------------------------
-{
-    "facets" : {
-        "wow_facet" : {
-            "filter" : {
-                "term" : { "tag" : "wow" }
-            }
-        }
-    }
-}
---------------------------------------------------
-
-Note, filter facet filters are faster than query facet when using native
-filters (non query wrapper ones).

+ 0 - 258
docs/reference/search/facets/geo-distance-facet.asciidoc

@@ -1,258 +0,0 @@
-[[search-facets-geo-distance-facet]]
-=== Geo Distance Facets
-
-include::deprecated.asciidoc[]
-
-NOTE: The equivalent aggregation would be the
-<<search-aggregations-bucket-geodistance-aggregation,`geo_distance`>>
-aggregation.
-
-The geo_distance facet is a facet providing information for ranges of
-distances from a provided geo_point including count of the number of
-hits that fall within each range, and aggregation information (like
-total).
-
-Assuming the following sample doc:
-
-[source,js]
---------------------------------------------------
-{
-    "pin" : {
-        "location" : {
-            "lat" : 40.12,
-            "lon" : -71.34
-        }
-    }
-}
---------------------------------------------------
-
-Here is an example that create a `geo_distance` facet from a
-`pin.location` of 40,-70, and a set of ranges:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "geo1" : {
-            "geo_distance" : {
-                "pin.location" : {
-                    "lat" : 40,
-                    "lon" : -70
-                },
-                "ranges" : [
-                    { "to" : 10 },
-                    { "from" : 10, "to" : 20 },
-                    { "from" : 20, "to" : 100 },
-                    { "from" : 100 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Accepted Formats
-
-In much the same way the geo_point type can accept different
-representation of the geo point, the filter can accept it as well:
-
-===== Lat Lon As Properties
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "geo1" : {
-            "geo_distance" : {
-                "pin.location" : {
-                    "lat" : 40,
-                    "lon" : -70
-                },
-                "ranges" : [
-                    { "to" : 10 },
-                    { "from" : 10, "to" : 20 },
-                    { "from" : 20, "to" : 100 },
-                    { "from" : 100 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-===== Lat Lon As Array
-
-Format in `[lon, lat]`, note, the order of lon/lat here in order to
-conform with http://geojson.org/[GeoJSON].
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "geo1" : {
-            "geo_distance" : {
-                "pin.location" : [40, -70],
-                "ranges" : [
-                    { "to" : 10 },
-                    { "from" : 10, "to" : 20 },
-                    { "from" : 20, "to" : 100 },
-                    { "from" : 100 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-===== Lat Lon As String
-
-Format in `lat,lon`.
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "geo1" : {
-            "geo_distance" : {
-                "pin.location" : "40, -70",
-                "ranges" : [
-                    { "to" : 10 },
-                    { "from" : 10, "to" : 20 },
-                    { "from" : 20, "to" : 100 },
-                    { "from" : 100 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-===== Geohash
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "geo1" : {
-            "geo_distance" : {
-                "pin.location" : "drm3btev3e86",
-                "ranges" : [
-                    { "to" : 10 },
-                    { "from" : 10, "to" : 20 },
-                    { "from" : 20, "to" : 100 },
-                    { "from" : 100 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Ranges
-
-When a `to` or `from` are not set, they are assumed to be unbounded.
-Ranges are allowed to overlap, basically, each range is treated by
-itself.
-
-==== Options
-
-[cols="<,<",options="header",]
-|=======================================================================
-|Option |Description
-|`unit` |The unit the ranges are provided in. Defaults to `km`. Can also
-be `mi`, `miles`, `in`, `inch`, `yd`, `yards`, `ft`, `feet`, `kilometers`, `mm`, `millimeters`, `cm`, `centimeters`, `m` or `meters`.
-
-|`distance_type` |How to compute the distance. Can either be `arc`
-(better precision), `sloppy_arc` (faster) or `plane` (fastest). Defaults to `sloppy_arc`.
-|=======================================================================
-
-==== Value Options
-
-On top of the count of hits falling within each range, aggregated data
-can be provided (total) as well. By default, the aggregated data will
-simply use the distance calculated, but the value can be extracted
-either using a different numeric field, or a script. Here is an example
-of using a different numeric field:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "geo1" : {
-            "geo_distance" : {
-                "pin.location" : "drm3btev3e86",
-                "value_field" : "num1",
-                "ranges" : [
-                    { "to" : 10 },
-                    { "from" : 10, "to" : 20 },
-                    { "from" : 20, "to" : 100 },
-                    { "from" : 100 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-And here is an example of using a script:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "geo1" : {
-            "geo_distance" : {
-                "pin.location" : "drm3btev3e86",
-                "value_script" : "doc['num1'].value * factor",
-                "params" : {
-                    "factor" : 5
-                }
-                "ranges" : [
-                    { "to" : 10 },
-                    { "from" : 10, "to" : 20 },
-                    { "from" : 20, "to" : 100 },
-                    { "from" : 100 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-Note the params option, allowing to pass parameters to the script
-(resulting in faster script execution instead of providing the values
-within the script each time).
-
-.`geo_point` Type
-[NOTE]
---
-The facet *requires* the `geo_point` type to be set on the relevant
-field.
---
-
-.Multi Location Per Document
-[NOTE]
---
-The facet can work with multiple locations per document.
---

+ 0 - 144
docs/reference/search/facets/histogram-facet.asciidoc

@@ -1,144 +0,0 @@
-[[search-facets-histogram-facet]]
-=== Histogram Facets
-
-include::deprecated.asciidoc[]
-
-NOTE: The equivalent aggregation would be the
-<<search-aggregations-bucket-histogram-aggregation,`histogram`>>
-aggregation.
-
-The histogram facet works with numeric data by building a histogram
-across intervals of the field values. Each value is "rounded" into an
-interval (or placed in a bucket), and statistics are provided per
-interval/bucket (count and total). Here is a simple example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "histo1" : {
-            "histogram" : {
-                "field" : "field_name",
-                "interval" : 100
-            }
-        }
-    }
-}
---------------------------------------------------
-
-The above example will run a histogram facet on the `field_name` field,
-with an `interval` of `100` (so, for example, a value of `1055` will be
-placed within the `1000` bucket).
-
-The interval can also be provided as a time based interval (using the
-time format). This mainly make sense when working on date fields or
-field that represent absolute milliseconds, here is an example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "histo1" : {
-            "histogram" : {
-                "field" : "field_name",
-                "time_interval" : "1.5h"
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Key and Value
-
-The histogram facet allows to use a different key and value. The key is
-used to place the hit/document within the appropriate bucket, and the
-value is used to compute statistical data (for example, total). Here is
-an example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "histo1" : {
-            "histogram" : {
-                "key_field" : "key_field_name",
-                "value_field" : "value_field_name",
-                "interval" : 100
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Script Key and Value
-
-Sometimes, some munging of both the key and the value are needed. In the
-key case, before it is rounded into a bucket, and for the value, when
-the statistical data is computed per bucket
-<<modules-scripting,scripts>> can be used. Here
-is an example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "histo1" : {
-            "histogram" : {
-                "key_script" : "doc['date'].date.minuteOfHour",
-                "value_script" : "doc['num1'].value"
-            }
-        }
-    }
-}
---------------------------------------------------
-
-In the above sample, we can use a date type field called `date` to get
-the minute of hour from it, and the total will be computed based on
-another field `num1`. Note, in this case, no `interval` was provided, so
-the bucket will be based directly on the `key_script` (no rounding).
-
-Parameters can also be provided to the different scripts (preferable if
-the script is the same, with different values for a specific parameter,
-like "factor"):
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "histo1" : {
-            "histogram" : {
-                "key_script" : "doc['date'].date.minuteOfHour * factor1",
-                "value_script" : "doc['num1'].value + factor2",
-                "params" : {
-                    "factor1" : 2,
-                    "factor2" : 3
-                }
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Memory Considerations
-
-In order to implement the histogram facet, the relevant field values are
-loaded into memory from the index. This means that per shard, there
-should be enough memory to contain them. Since by default, dynamic
-introduced types are `long` and `double`, one option to reduce the
-memory footprint is to explicitly set the types for the relevant fields
-to either `short`, `integer`, or `float` when possible.

+ 0 - 25
docs/reference/search/facets/query-facet.asciidoc

@@ -1,25 +0,0 @@
-[[search-facets-query-facet]]
-=== Query Facets
-
-include::deprecated.asciidoc[]
-
-NOTE: There is no equivalent aggregation but you can use the
-<<search-aggregations-bucket-filter-aggregation,`filter`>> aggregation and wrap
-the query inside a <<query-dsl-query-filter,query filter>>.
-
-A facet query allows to return a count of the hits matching the facet
-query. The query itself can be expressed using the Query DSL. For
-example:
-
-[source,js]
---------------------------------------------------
-{
-    "facets" : {
-        "wow_facet" : {
-            "query" : {
-                "term" : { "tag" : "wow" }
-            }
-        }
-    }
-}    
---------------------------------------------------

+ 0 - 125
docs/reference/search/facets/range-facet.asciidoc

@@ -1,125 +0,0 @@
-[[search-facets-range-facet]]
-=== Range Facets
-
-include::deprecated.asciidoc[]
-
-NOTE: The equivalent aggregation would be the
-<<search-aggregations-bucket-range-aggregation,`range`>>
-aggregation.
-
-`range` facet allows to specify a set of ranges and get both the number
-of docs (count) that fall within each range, and aggregated data either
-based on the field, or using another field. Here is a simple example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "range1" : {
-            "range" : {
-                "field" : "field_name",
-                "ranges" : [
-                    { "to" : 50 },
-                    { "from" : 20, "to" : 70 },
-                    { "from" : 70, "to" : 120 },
-                    { "from" : 150 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-Another option which is a bit more DSL enabled is to provide the ranges
-on the actual field name, for example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "range1" : {
-            "range" : {
-                "my_field" : [
-                    { "to" : 50 },
-                    { "from" : 20, "to" : 70 },
-                    { "from" : 70, "to" : 120 },
-                    { "from" : 150 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-The `range` facet always includes the `from` parameter and excludes the
-`to` parameter for each range.
-
-==== Key and Value
-
-The `range` facet allows to use a different field to check if its value
-falls within a range, and another field to compute aggregated data per
-range (like total). For example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "range1" : {
-            "range" : {
-                "key_field" : "field_name",
-                "value_field" : "another_field_name",
-                "ranges" : [
-                    { "to" : 50 },
-                    { "from" : 20, "to" : 70 },
-                    { "from" : 70, "to" : 120 },
-                    { "from" : 150 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Script Key and Value
-
-Sometimes, some munging of both the key and the value are needed. In the
-key case, before it is checked if it falls within a range, and for the
-value, when the statistical data is computed per range scripts can be
-used. Here is an example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "range1" : {
-            "range" : {
-                "key_script" : "doc['date'].date.minuteOfHour",
-                "value_script" : "doc['num1'].value",
-                "ranges" : [
-                    { "to" : 50 },
-                    { "from" : 20, "to" : 70 },
-                    { "from" : 70, "to" : 120 },
-                    { "from" : 150 }
-                ]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Date Ranges
-
-The range facet support also providing the range as string formatted
-dates.

+ 0 - 107
docs/reference/search/facets/statistical-facet.asciidoc

@@ -1,107 +0,0 @@
-[[search-facets-statistical-facet]]
-=== Statistical Facet
-
-include::deprecated.asciidoc[]
-
-NOTE: The equivalent aggregation would be the
-<<search-aggregations-metrics-stats-aggregation,`stats`>>
-aggregation.
-
-Statistical facet allows to compute statistical data on a numeric
-fields. The statistical data include count, total, sum of squares, mean
-(average), minimum, maximum, variance, and standard deviation. Here is
-an example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "stat1" : {
-            "statistical" : {
-                "field" : "num1"
-            }
-        }
-    }
-}    
---------------------------------------------------
-
-==== Script field
-
-When using `field`, the numeric value of the field is used to compute
-the statistical information. Sometimes, several fields values represent
-the statistics we want to compute, or some sort of mathematical
-evaluation. The script field allows to define a
-<<modules-scripting,script>> to evaluate, with
-its value used to compute the statistical information. For example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "stat1" : {
-            "statistical" : {
-                "script" : "doc['num1'].value + doc['num2'].value"
-            }
-        }
-    }
-}    
---------------------------------------------------
-
-Parameters can also be provided to the different scripts (preferable if
-the script is the same, with different values for a specific parameter,
-like "factor"):
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "stat1" : {
-            "statistical" : {
-                "script" : "(doc['num1'].value + doc['num2'].value) * factor",
-                "params" : {
-                    "factor" : 5
-                }
-            }
-        }
-    }
-}    
---------------------------------------------------
-
-==== Multi Field
-
-The statistical facet can be executed against more than one field,
-returning the aggregation result across those fields. For example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {}
-    },
-    "facets" : {
-        "stat1" : {
-            "statistical" : {
-                "fields" : ["num1", "num2"]
-            }
-        }
-    }
-}    
---------------------------------------------------
-
-==== Memory Considerations
-
-In order to implement the statistical facet, the relevant field values
-are loaded into memory from the index. This means that per shard, there
-should be enough memory to contain them. Since by default, dynamic
-introduced types are `long` and `double`, one option to reduce the
-memory footprint is to explicitly set the types for the relevant fields
-to either `short`, `integer`, or `float` when possible.

+ 0 - 294
docs/reference/search/facets/terms-facet.asciidoc

@@ -1,294 +0,0 @@
-[[search-facets-terms-facet]]
-=== Terms Facet
-
-include::deprecated.asciidoc[]
-
-NOTE: The equivalent aggregation would be the
-<<search-aggregations-bucket-terms-aggregation,`terms`>>
-aggregation.
-
-Allow to specify field facets that return the N most frequent terms. For
-example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {  }
-    },
-    "facets" : {
-        "tag" : {
-            "terms" : {
-                "field" : "tag",
-                "size" : 10
-            }
-        }
-    }
-}
---------------------------------------------------
-
-It is preferred to have the terms facet executed on a non analyzed
-field, or a field without a large number of terms it breaks to.
-
-==== Accuracy Control
-
-The `size` parameter defines how many top terms should be returned out
-of the overall terms list. By default, the node coordinating the
-search process will ask each shard to provide its own top `size` terms
-and once all shards respond, it will reduce the results to the final list
-that will then be sent back to the client. This means that if the number
-of unique terms is greater than `size`, the returned list is slightly off
-and not accurate (it could be that the term counts are slightly off and it
-could even be that a term that should have been in the top `size` entries
-was not returned).
-
-The higher the requested `size` is, the more accurate the results will be,
-but also, the more expensive it will be to compute the final results (both
-due to bigger priority queues that are managed on a shard level and due to
-bigger data transfers between the nodes and the client). In an attempt to
-minimize the extra work that comes with bigger requested `size` the
-`shard_size` parameter was introduced. When defined, it will determine
-how many terms the coordinating node will request from each shard. Once
-all the shards responded, the coordinating node will then reduce them
-to a final result which will be based on the `size` parameter - this way,
-one can increase the accuracy of the returned terms and avoid the overhead
-of streaming a big list of terms back to the client.
-
-Note that `shard_size` cannot be smaller than `size`... if that's the case
-elasticsearch will override it and reset it to be equal to `size`.
-
-
-==== Ordering
-
-Allow to control the ordering of the terms facets, to be ordered by
-`count`, `term`, `reverse_count` or `reverse_term`. The default is
-`count`. Here is an example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {  }
-    },
-    "facets" : {
-        "tag" : {
-            "terms" : {
-                "field" : "tag",
-                "size" : 10,
-                "order" : "term"
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== All Terms
-
-Allow to get all the terms in the terms facet, ones that do not match a
-hit, will have a count of 0. Note, this should not be used with fields
-that have many terms.
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {  }
-    },
-    "facets" : {
-        "tag" : {
-            "terms" : {
-                "field" : "tag",
-                "all_terms" : true
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Excluding Terms
-
-It is possible to specify a set of terms that should be excluded from
-the terms facet request result:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : { }
-    },
-    "facets" : {
-        "tag" : {
-            "terms" : {
-                "field" : "tag",
-                "exclude" : ["term1", "term2"]
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Regex Patterns
-
-The terms API allows to define regex expression that will control which
-terms will be included in the faceted list, here is an example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : { }
-    },
-    "facets" : {
-        "tag" : {
-            "terms" : {
-                "field" : "tag",
-                "regex" : "_regex expression here_",
-                "regex_flags" : "DOTALL"
-            }
-        }
-    }
-}
---------------------------------------------------
-
-Check
-http://download.oracle.com/javase/6/docs/api/java/util/regex/Pattern.html#field_summary[Java
-Pattern API] for more details about `regex_flags` options.
-
-==== Term Scripts
-
-Allow to define a script for terms facet to process the actual term that
-will be used in the term facet collection, and also optionally control
-its inclusion or not.
-
-The script can either return a boolean value, with `true` to include it
-in the facet collection, and `false` to exclude it from the facet
-collection.
-
-Another option is for the script to return a `string` controlling the
-term that will be used to count against. The script execution will
-include the term variable which is the current field term used.
-
-For example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {  }
-    },
-    "facets" : {
-        "tag" : {
-            "terms" : {
-                "field" : "tag",
-                "size" : 10,
-                "script" : "term + 'aaa'"
-            }
-        }
-    }
-}
---------------------------------------------------
-
-And using the boolean feature:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {  }
-    },
-    "facets" : {
-        "tag" : {
-            "terms" : {
-                "field" : "tag",
-                "size" : 10,
-                "script" : "term == 'aaa' ? true : false"
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Multi Fields
-
-The term facet can be executed against more than one field, returning
-the aggregation result across those fields. For example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {  }
-    },
-    "facets" : {
-        "tag" : {
-            "terms" : {
-                "fields" : ["tag1", "tag2"],
-                "size" : 10
-            }
-        }
-    }
-}
---------------------------------------------------
-
-==== Script Field
-
-A script that provides the actual terms that will be processed for a
-given doc. A `script_field` (or `script` which will be used when no
-`field` or `fields` are provided) can be set to provide it.
-
-As an example, a search request (that is quite "heavy") can be executed
-and use either `_source` itself or `_fields` (for stored fields) without
-needing to load the terms to memory (at the expense of much slower
-execution of the search, and causing more IO load):
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {  }
-    },
-    "facets" : {
-        "my_facet" : {
-            "terms" : {
-                "script_field" : "_source.my_field",
-                "size" : 10
-            }
-        }
-    }
-}
---------------------------------------------------
-
-Or:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {  }
-    },
-    "facets" : {
-        "my_facet" : {
-            "terms" : {
-                "script_field" : "_fields['my_field']",
-                "size" : 10
-            }
-        }
-    }
-}
---------------------------------------------------
-
-Note also, that the above will use the whole field value as a single
-term.
-
-==== _index
-
-The term facet allows to specify a special field name called `_index`.
-This will return a facet count of hits per `_index` the search was
-executed on (relevant when a search request spans more than one index).
-
-==== Memory Considerations
-
-Term facet causes the relevant field values to be loaded into memory.
-This means that per shard, there should be enough memory to contain
-them. It is advisable to explicitly set the fields to be `not_analyzed`
-or make sure the number of unique tokens a field can have is not large.

+ 0 - 59
docs/reference/search/facets/terms-stats-facet.asciidoc

@@ -1,59 +0,0 @@
-[[search-facets-terms-stats-facet]]
-=== Terms Stats Facet
-
-include::deprecated.asciidoc[]
-
-NOTE: The equivalent aggregation would be the
-<<search-aggregations-bucket-terms-aggregation,`terms`>>
-aggregation with an inner
-<<search-aggregations-metrics-stats-aggregation,`stats`>>
-aggregation.
-
-The `terms_stats` facet combines both the
-<<search-facets-terms-facet,terms>> and
-<<search-facets-statistical-facet,statistical>>
-allowing to compute stats computed on a field, per term value driven by
-another field. For example:
-
-[source,js]
---------------------------------------------------
-{
-    "query" : {
-        "match_all" : {  }
-    },
-    "facets" : {
-        "tag_price_stats" : {
-            "terms_stats" : {
-                "key_field" : "tag",
-                "value_field" : "price"
-            }
-        }
-    }
-}
---------------------------------------------------
-
-The `size` parameter controls how many facet entries will be returned.
-It defaults to `10`. Setting it to 0 will return all terms matching the
-hits (be careful not to return too many results).
-
-One can also set `shard_size` (in addition to `size`) which will determine
-how many term entries will be requested from each shard. When dealing
-with field with high cardinality (at least higher than the requested `size`)
-The greater `shard_size` is - the more accurate the result will be (and the
-more expensive the overall facet computation will be). `shard_size` is there
-to enable you to increase accuracy yet still avoid returning too many
-terms_stats entries back to the client.
-
-Ordering is done by setting `order`, with possible values of `term`,
-`reverse_term`, `count`, `reverse_count`, `total`, `reverse_total`,
-`min`, `reverse_min`, `max`, `reverse_max`, `mean`, `reverse_mean`.
-Defaults to `count`.
-
-The value computed can also be a script, using the `value_script`
-instead of `value_field`, in which case the `lang` can control its
-language, and `params` allow to provide custom parameters (as in other
-scripted components).
-
-Note, the terms stats can work with multi valued key fields, or multi
-valued value fields, but not when both are multi valued (as ordering is
-not maintained).

+ 1 - 1
docs/reference/search/multi-search.asciidoc

@@ -19,7 +19,7 @@ body\n
 The header part includes which index / indices to search on, optional
 (mapping) types to search on, the `search_type`, `preference`, and
 `routing`. The body includes the typical search body request (including
-the `query`, `facets`, `from`, `size`, and so on). Here is an example:
+the `query`, `aggregations`, `from`, `size`, and so on). Here is an example:
 
 [source,js]
 --------------------------------------------------

+ 0 - 2
docs/reference/search/percolate.asciidoc

@@ -197,8 +197,6 @@ is required for this option. Defaults to `false`.
 is supported. Other sort fields will throw an exception. The `size` and `query` option are required for this setting. Like
 `track_score` the score is based on the query and represents how the query matched to the percolate query's metadata
 and *not* how the document being percolated matched to the query.
-* `facets` - Allows facet definitions to be included. The facets are based on the matching percolator queries. See facet
-documentation how to define facets.
 * `aggs` - Allows aggregation definitions to be included. The aggregations are based on the matching percolator queries,
 look at the aggregation documentation on how to define aggregations.
 * `highlight` - Allows highlight definitions to be included. The document being percolated is being highlight for each

+ 1 - 1
docs/reference/search/request/search-type.asciidoc

@@ -86,7 +86,7 @@ Parameter value: *count*.
 
 A special search type that returns the count that matched the search
 request without any docs (represented in `total_hits`), and possibly,
-including facets as well. In general, this is preferable to the `count`
+including aggregations as well. In general, this is preferable to the `count`
 API as it provides more options.
 
 [[scan]]

+ 0 - 9
src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java

@@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.FilterBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
-import org.elasticsearch.search.facet.FacetBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
 
@@ -164,14 +163,6 @@ public class PercolateRequestBuilder extends BroadcastOperationRequestBuilder<Pe
         return this;
     }
 
-    /**
-     * Add a facet definition.
-     */
-    public PercolateRequestBuilder addFacet(FacetBuilder facetBuilder) {
-        sourceBuilder().addFacet(facetBuilder);
-        return this;
-    }
-
     /**
      * Add an aggregation definition.
      */

+ 1 - 17
src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java

@@ -31,7 +31,6 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
 import org.elasticsearch.percolator.PercolatorService;
 import org.elasticsearch.rest.action.support.RestActions;
 import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.facet.InternalFacets;
 import org.elasticsearch.search.highlight.HighlightField;
 
 import java.io.IOException;
@@ -47,16 +46,14 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
     private long tookInMillis;
     private Match[] matches;
     private long count;
-    private InternalFacets facets;
     private InternalAggregations aggregations;
 
     public PercolateResponse(int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures,
-                             Match[] matches, long count, long tookInMillis, InternalFacets facets, InternalAggregations aggregations) {
+                             Match[] matches, long count, long tookInMillis, InternalAggregations aggregations) {
         super(totalShards, successfulShards, failedShards, shardFailures);
         this.tookInMillis = tookInMillis;
         this.matches = matches;
         this.count = count;
-        this.facets = facets;
         this.aggregations = aggregations;
     }
 
@@ -101,13 +98,6 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
         return count;
     }
 
-    /**
-     * @return Any facet that has been executed on the query metadata. This can return <code>null</code>.
-     */
-    public InternalFacets getFacets() {
-        return facets;
-    }
-
     /**
      * @return Any aggregations that have been executed on the query metadata. This can return <code>null</code>.
      */
@@ -163,10 +153,6 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
             }
             builder.endArray();
         }
-        if (facets != null) {
-            facets.toXContent(builder, params);
-        }
-
         if (aggregations != null) {
             aggregations.toXContent(builder, params);
         }
@@ -186,7 +172,6 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
                 matches[i].readFrom(in);
             }
         }
-        facets = InternalFacets.readOptionalFacets(in);
         aggregations = InternalAggregations.readOptionalAggregations(in);
     }
 
@@ -203,7 +188,6 @@ public class PercolateResponse extends BroadcastOperationResponse implements Ite
                 match.writeTo(out);
             }
         }
-        out.writeOptionalStreamable(facets);
         out.writeOptionalStreamable(aggregations);
     }
 

+ 0 - 11
src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java

@@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.percolator.PercolateContext;
 import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.facet.InternalFacets;
 import org.elasticsearch.search.highlight.HighlightField;
 import org.elasticsearch.search.query.QuerySearchResult;
 
@@ -51,7 +50,6 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
     private byte percolatorTypeId;
     private int requestedSize;
 
-    private InternalFacets facets;
     private InternalAggregations aggregations;
 
     PercolateShardResponse() {
@@ -68,9 +66,6 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
         this.requestedSize = context.size();
         QuerySearchResult result = context.queryResult();
         if (result != null) {
-            if (result.facets() != null) {
-                this.facets = new InternalFacets(result.facets().facets());
-            }
             if (result.aggregations() != null) {
                 this.aggregations = (InternalAggregations) result.aggregations();
             }
@@ -113,10 +108,6 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
         return hls;
     }
 
-    public InternalFacets facets() {
-        return facets;
-    }
-
     public InternalAggregations aggregations() {
         return aggregations;
     }
@@ -152,7 +143,6 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
             }
             hls.add(fields);
         }
-        facets = InternalFacets.readOptionalFacets(in);
         aggregations = InternalAggregations.readOptionalAggregations(in);
     }
 
@@ -178,7 +168,6 @@ public class PercolateShardResponse extends BroadcastShardOperationResponse {
                 entry.getValue().writeTo(out);
             }
         }
-        out.writeOptionalStreamable(facets);
         out.writeOptionalStreamable(aggregations);
     }
 }

+ 0 - 21
src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java

@@ -30,7 +30,6 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilderException;
-import org.elasticsearch.search.facet.FacetBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.ScoreSortBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
@@ -53,7 +52,6 @@ public class PercolateSourceBuilder implements ToXContent {
     private List<SortBuilder> sorts;
     private Boolean trackScores;
     private HighlightBuilder highlightBuilder;
-    private List<FacetBuilder> facets;
     private List<AggregationBuilder> aggregations;
 
     public DocBuilder percolateDocument() {
@@ -148,17 +146,6 @@ public class PercolateSourceBuilder implements ToXContent {
         return this;
     }
 
-    /**
-     * Add a facet definition.
-     */
-    public PercolateSourceBuilder addFacet(FacetBuilder facetBuilder) {
-        if (facets == null) {
-            facets = Lists.newArrayList();
-        }
-        facets.add(facetBuilder);
-        return this;
-    }
-
     /**
      * Add an aggregation definition.
      */
@@ -212,14 +199,6 @@ public class PercolateSourceBuilder implements ToXContent {
         if (highlightBuilder != null) {
             highlightBuilder.toXContent(builder, params);
         }
-        if (facets != null) {
-            builder.field("facets");
-            builder.startObject();
-            for (FacetBuilder facet : facets) {
-                facet.toXContent(builder, params);
-            }
-            builder.endObject();
-        }
         if (aggregations != null) {
             builder.field("aggregations");
             builder.startObject();

+ 1 - 1
src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java

@@ -160,7 +160,7 @@ public class TransportPercolateAction extends TransportBroadcastOperationAction<
             long tookInMillis = System.currentTimeMillis() - request.startTime;
             return new PercolateResponse(
                     shardsResponses.length(), successfulShards, failedShards, shardFailures,
-                    result.matches(), result.count(), tookInMillis, result.reducedFacets(), result.reducedAggregations()
+                    result.matches(), result.count(), tookInMillis, result.reducedAggregations()
             );
         }
     }

+ 1 - 1
src/main/java/org/elasticsearch/action/search/SearchRequest.java

@@ -51,7 +51,7 @@ import static org.elasticsearch.search.Scroll.readScroll;
  * {@link org.elasticsearch.client.Requests#searchRequest(String...)}.
  * <p/>
  * <p>Note, the search {@link #source(org.elasticsearch.search.builder.SearchSourceBuilder)}
- * is required. The search source is the different search options, including facets and such.
+ * is required. The search source is the different search options, including aggregations and such.
  * <p/>
  * <p>There is an option to specify an addition search source using the {@link #extraSource(org.elasticsearch.search.builder.SearchSourceBuilder)}.
  *

+ 8 - 69
src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java

@@ -34,7 +34,6 @@ import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.Scroll;
 import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.facet.FacetBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.search.rescore.RescoreBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
@@ -234,7 +233,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
 
     /**
      * Sets a filter that will be executed after the query has been executed and only has affect on the search hits
-     * (not aggregations or facets). This filter is always executed as last filtering mechanism.
+     * (not aggregations). This filter is always executed as last filtering mechanism.
      */
     public SearchRequestBuilder setPostFilter(FilterBuilder postFilter) {
         sourceBuilder().postFilter(postFilter);
@@ -243,7 +242,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
 
     /**
      * Sets a filter on the query executed that only applies to the search query
-     * (and not facets for example).
+     * (and not aggs for example).
      */
     public SearchRequestBuilder setPostFilter(String postFilter) {
         sourceBuilder().postFilter(postFilter);
@@ -252,7 +251,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
 
     /**
      * Sets a filter on the query executed that only applies to the search query
-     * (and not facets for example).
+     * (and not aggs for example).
      */
     public SearchRequestBuilder setPostFilter(BytesReference postFilter) {
         sourceBuilder().postFilter(postFilter);
@@ -261,7 +260,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
 
     /**
      * Sets a filter on the query executed that only applies to the search query
-     * (and not facets for example).
+     * (and not aggs for example).
      */
     public SearchRequestBuilder setPostFilter(byte[] postFilter) {
         sourceBuilder().postFilter(postFilter);
@@ -270,7 +269,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
 
     /**
      * Sets a filter on the query executed that only applies to the search query
-     * (and not facets for example).
+     * (and not aggs for example).
      */
     public SearchRequestBuilder setPostFilter(byte[] postFilter, int postFilterOffset, int postFilterLength) {
         sourceBuilder().postFilter(postFilter, postFilterOffset, postFilterLength);
@@ -279,7 +278,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
 
     /**
      * Sets a filter on the query executed that only applies to the search query
-     * (and not facets for example).
+     * (and not aggs for example).
      */
     public SearchRequestBuilder setPostFilter(XContentBuilder postFilter) {
         sourceBuilder().postFilter(postFilter);
@@ -288,7 +287,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
 
     /**
      * Sets a filter on the query executed that only applies to the search query
-     * (and not facets for example).
+     * (and not aggs for example).
      */
     public SearchRequestBuilder setPostFilter(Map postFilter) {
         sourceBuilder().postFilter(postFilter);
@@ -532,66 +531,6 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
         return this;
     }
 
-    /**
-     * Adds a facet to the search operation.
-     * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead.
-     */
-    @Deprecated
-    public SearchRequestBuilder addFacet(FacetBuilder facet) {
-        sourceBuilder().facet(facet);
-        return this;
-    }
-
-    /**
-     * Sets a raw (xcontent) binary representation of facets to use.
-     * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead.
-     */
-    @Deprecated
-    public SearchRequestBuilder setFacets(BytesReference facets) {
-        sourceBuilder().facets(facets);
-        return this;
-    }
-
-    /**
-     * Sets a raw (xcontent) binary representation of facets to use.
-     * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead.
-     */
-    @Deprecated
-    public SearchRequestBuilder setFacets(byte[] facets) {
-        sourceBuilder().facets(facets);
-        return this;
-    }
-
-    /**
-     * Sets a raw (xcontent) binary representation of facets to use.
-     * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead.
-     */
-    @Deprecated
-    public SearchRequestBuilder setFacets(byte[] facets, int facetsOffset, int facetsLength) {
-        sourceBuilder().facets(facets, facetsOffset, facetsLength);
-        return this;
-    }
-
-    /**
-     * Sets a raw (xcontent) binary representation of facets to use.
-     * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead.
-     */
-    @Deprecated
-    public SearchRequestBuilder setFacets(XContentBuilder facets) {
-        sourceBuilder().facets(facets);
-        return this;
-    }
-
-    /**
-     * Sets a raw (xcontent) binary representation of facets to use.
-     * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead.
-     */
-    @Deprecated
-    public SearchRequestBuilder setFacets(Map facets) {
-        sourceBuilder().facets(facets);
-        return this;
-    }
-
     /**
      * Adds an get to the search operation.
      */
@@ -620,7 +559,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
      * Sets a raw (xcontent) binary representation of aggregations to use.
      */
     public SearchRequestBuilder setAggregations(byte[] aggregations, int aggregationsOffset, int aggregationsLength) {
-        sourceBuilder().facets(aggregations, aggregationsOffset, aggregationsLength);
+        sourceBuilder().aggregations(aggregations, aggregationsOffset, aggregationsLength);
         return this;
     }
 

+ 4 - 9
src/main/java/org/elasticsearch/action/search/SearchResponse.java

@@ -23,11 +23,13 @@ import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.*;
+import org.elasticsearch.common.xcontent.StatusToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.aggregations.Aggregations;
-import org.elasticsearch.search.facet.Facets;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.suggest.Suggest;
 
@@ -93,13 +95,6 @@ public class SearchResponse extends ActionResponse implements StatusToXContent {
         return internalResponse.hits();
     }
 
-    /**
-     * The search facets.
-     */
-    public Facets getFacets() {
-        return internalResponse.facets();
-    }
-
     public Aggregations getAggregations() {
         return internalResponse.aggregations();
     }

+ 1 - 1
src/main/java/org/elasticsearch/action/search/SearchType.java

@@ -56,7 +56,7 @@ public enum SearchType {
      */
     SCAN((byte) 4),
     /**
-     * Only counts the results, will still execute facets and the like.
+     * Only counts the results, will still execute aggregations and the like.
      */
     COUNT((byte) 5);
 

+ 1 - 1
src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollScanAction.java

@@ -119,7 +119,7 @@ public class TransportSearchScrollScanAction extends AbstractComponent {
 
         public void start() {
             if (scrollId.getContext().length == 0) {
-                final InternalSearchResponse internalResponse = new InternalSearchResponse(new InternalSearchHits(InternalSearchHits.EMPTY, Long.parseLong(this.scrollId.getAttributes().get("total_hits")), 0.0f), null, null, null, false, null);
+                final InternalSearchResponse internalResponse = new InternalSearchResponse(new InternalSearchHits(InternalSearchHits.EMPTY, Long.parseLong(this.scrollId.getAttributes().get("total_hits")), 0.0f), null, null, false, null);
                 listener.onResponse(new SearchResponse(internalResponse, request.scrollId(), 0, 0, 0l, buildShardFailures()));
                 return;
             }

+ 0 - 2
src/main/java/org/elasticsearch/index/query/HasChildFilterParser.java

@@ -91,8 +91,6 @@ public class HasChildFilterParser implements FilterParser {
             } else if (token.isValue()) {
                 if ("type".equals(currentFieldName) || "child_type".equals(currentFieldName) || "childType".equals(currentFieldName)) {
                     childType = parser.text();
-                } else if ("_scope".equals(currentFieldName)) {
-                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_child] filter has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("_name".equals(currentFieldName)) {
                     filterName = parser.text();
                 } else if ("_cache".equals(currentFieldName)) {

+ 0 - 3
src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java

@@ -89,9 +89,6 @@ public class HasChildQueryParser implements QueryParser {
             } else if (token.isValue()) {
                 if ("type".equals(currentFieldName) || "child_type".equals(currentFieldName) || "childType".equals(currentFieldName)) {
                     childType = parser.text();
-                } else if ("_scope".equals(currentFieldName)) {
-                    throw new QueryParsingException(parseContext.index(),
-                            "the [_scope] support in [has_child] query has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("score_type".equals(currentFieldName) || "scoreType".equals(currentFieldName)) {
                     scoreType = ScoreType.fromString(parser.text());
                 } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {

+ 0 - 2
src/main/java/org/elasticsearch/index/query/HasParentFilterParser.java

@@ -90,8 +90,6 @@ public class HasParentFilterParser implements FilterParser {
             } else if (token.isValue()) {
                 if ("type".equals(currentFieldName) || "parent_type".equals(currentFieldName) || "parentType".equals(currentFieldName)) {
                     parentType = parser.text();
-                } else if ("_scope".equals(currentFieldName)) {
-                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_parent] filter has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("_name".equals(currentFieldName)) {
                     filterName = parser.text();
                 } else if ("_cache".equals(currentFieldName)) {

+ 0 - 2
src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java

@@ -85,8 +85,6 @@ public class HasParentQueryParser implements QueryParser {
             } else if (token.isValue()) {
                 if ("type".equals(currentFieldName) || "parent_type".equals(currentFieldName) || "parentType".equals(currentFieldName)) {
                     parentType = parser.text();
-                } else if ("_scope".equals(currentFieldName)) {
-                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_parent] query has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("score_type".equals(currentFieldName) || "scoreType".equals(currentFieldName)) {
                     String scoreTypeValue = parser.text();
                     if ("score".equals(scoreTypeValue)) {

+ 2 - 13
src/main/java/org/elasticsearch/index/query/NestedFilterParser.java

@@ -59,7 +59,6 @@ public class NestedFilterParser implements FilterParser {
         Filter filter = null;
         boolean filterFound = false;
         float boost = 1.0f;
-        boolean join = true;
         String path = null;
         boolean cache = false;
         CacheKeyFilter.Key cacheKey = null;
@@ -88,14 +87,10 @@ public class NestedFilterParser implements FilterParser {
                         throw new QueryParsingException(parseContext.index(), "[nested] filter does not support [" + currentFieldName + "]");
                     }
                 } else if (token.isValue()) {
-                    if ("join".equals(currentFieldName)) {
-                        join = parser.booleanValue();
-                    } else if ("path".equals(currentFieldName)) {
+                    if ("path".equals(currentFieldName)) {
                         path = parser.text();
                     } else if ("boost".equals(currentFieldName)) {
                         boost = parser.floatValue();
-                    } else if ("_scope".equals(currentFieldName)) {
-                        throw new QueryParsingException(parseContext.index(), "the [_scope] support in [nested] filter has been removed, use nested filter as a facet_filter in the relevant facet");
                     } else if ("_name".equals(currentFieldName)) {
                         filterName = parser.text();
                     } else if ("_cache".equals(currentFieldName)) {
@@ -155,13 +150,7 @@ public class NestedFilterParser implements FilterParser {
             // expects FixedBitSet instances
             parentFilter = new FixedBitSetCachingWrapperFilter(parentFilter);
 
-            Filter nestedFilter;
-            if (join) {
-                ToParentBlockJoinQuery joinQuery = new ToParentBlockJoinQuery(query, parentFilter, ScoreMode.None);
-                nestedFilter = Queries.wrap(joinQuery);
-            } else {
-                nestedFilter = Queries.wrap(query);
-            }
+            Filter nestedFilter = Queries.wrap(new ToParentBlockJoinQuery(query, parentFilter, ScoreMode.None));
 
             if (cache) {
                 nestedFilter = parseContext.cacheFilter(nestedFilter, cacheKey);

+ 0 - 2
src/main/java/org/elasticsearch/index/query/NestedQueryParser.java

@@ -91,8 +91,6 @@ public class NestedQueryParser implements QueryParser {
                         path = parser.text();
                     } else if ("boost".equals(currentFieldName)) {
                         boost = parser.floatValue();
-                    } else if ("_scope".equals(currentFieldName)) {
-                        throw new QueryParsingException(parseContext.index(), "the [_scope] support in [nested] query has been removed, use nested filter as a facet_filter in the relevant facet");
                     } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
                         String sScoreMode = parser.text();
                         if ("avg".equals(sScoreMode)) {

+ 0 - 2
src/main/java/org/elasticsearch/index/query/TopChildrenQueryParser.java

@@ -86,8 +86,6 @@ public class TopChildrenQueryParser implements QueryParser {
             } else if (token.isValue()) {
                 if ("type".equals(currentFieldName)) {
                     childType = parser.text();
-                } else if ("_scope".equals(currentFieldName)) {
-                    throw new QueryParsingException(parseContext.index(), "the [_scope] support in [top_children] query has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("score".equals(currentFieldName)) {
                     scoreType = ScoreType.fromString(parser.text());
                 } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {

+ 0 - 13
src/main/java/org/elasticsearch/percolator/PercolateContext.java

@@ -53,7 +53,6 @@ import org.elasticsearch.search.SearchHitField;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.aggregations.SearchContextAggregations;
 import org.elasticsearch.search.dfs.DfsSearchResult;
-import org.elasticsearch.search.facet.SearchContextFacets;
 import org.elasticsearch.search.fetch.FetchSearchResult;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
@@ -106,7 +105,6 @@ public class PercolateContext extends SearchContext {
     private boolean queryRewritten;
     private Query percolateQuery;
     private FetchSubPhase.HitContext hitContext;
-    private SearchContextFacets facets;
     private SearchContextAggregations aggregations;
     private QuerySearchResult querySearchResult;
     private Sort sort;
@@ -273,17 +271,6 @@ public class PercolateContext extends SearchContext {
         return this;
     }
 
-    @Override
-    public SearchContextFacets facets() {
-        return facets;
-    }
-
-    @Override
-    public SearchContext facets(SearchContextFacets facets) {
-        this.facets = facets;
-        return this;
-    }
-
     // Unused:
     @Override
     public void preProcess() {

+ 9 - 63
src/main/java/org/elasticsearch/percolator/PercolatorService.java

@@ -20,7 +20,6 @@ package org.elasticsearch.percolator;
 
 import com.carrotsearch.hppc.ByteObjectOpenHashMap;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.index.memory.ExtendedMemoryIndex;
@@ -83,10 +82,6 @@ import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.aggregations.AggregationPhase;
 import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
 import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.facet.Facet;
-import org.elasticsearch.search.facet.FacetPhase;
-import org.elasticsearch.search.facet.InternalFacet;
-import org.elasticsearch.search.facet.InternalFacets;
 import org.elasticsearch.search.highlight.HighlightField;
 import org.elasticsearch.search.highlight.HighlightPhase;
 import org.elasticsearch.search.internal.SearchContext;
@@ -117,7 +112,6 @@ public class PercolatorService extends AbstractComponent {
     private final PercolatorIndex single;
     private final PercolatorIndex multi;
 
-    private final FacetPhase facetPhase;
     private final HighlightPhase highlightPhase;
     private final AggregationPhase aggregationPhase;
     private final SortParseElement sortParseElement;
@@ -129,7 +123,7 @@ public class PercolatorService extends AbstractComponent {
     @Inject
     public PercolatorService(Settings settings, IndicesService indicesService, CacheRecycler cacheRecycler,
                              PageCacheRecycler pageCacheRecycler, BigArrays bigArrays,
-                             HighlightPhase highlightPhase, ClusterService clusterService, FacetPhase facetPhase,
+                             HighlightPhase highlightPhase, ClusterService clusterService,
                              AggregationPhase aggregationPhase, ScriptService scriptService,
                              MappingUpdatedAction mappingUpdatedAction) {
         super(settings);
@@ -139,7 +133,6 @@ public class PercolatorService extends AbstractComponent {
         this.bigArrays = bigArrays;
         this.clusterService = clusterService;
         this.highlightPhase = highlightPhase;
-        this.facetPhase = facetPhase;
         this.aggregationPhase = aggregationPhase;
         this.scriptService = scriptService;
         this.mappingUpdatedAction = mappingUpdatedAction;
@@ -195,7 +188,7 @@ public class PercolatorService extends AbstractComponent {
                 throw new ElasticsearchIllegalArgumentException("Nothing to percolate");
             }
 
-            if (context.percolateQuery() == null && (context.trackScores() || context.doSort || context.facets() != null || context.aggregations() != null)) {
+            if (context.percolateQuery() == null && (context.trackScores() || context.doSort || context.aggregations() != null)) {
                 context.percolateQuery(new MatchAllDocsQuery());
             }
 
@@ -251,7 +244,6 @@ public class PercolatorService extends AbstractComponent {
 
         // TODO: combine all feature parse elements into one map
         Map<String, ? extends SearchParseElement> hlElements = highlightPhase.parseElements();
-        Map<String, ? extends SearchParseElement> facetElements = facetPhase.parseElements();
         Map<String, ? extends SearchParseElement> aggregationElements = aggregationPhase.parseElements();
 
         ParsedDocument doc = null;
@@ -289,10 +281,7 @@ public class PercolatorService extends AbstractComponent {
                 } else if (token == XContentParser.Token.START_OBJECT) {
                     SearchParseElement element = hlElements.get(currentFieldName);
                     if (element == null) {
-                        element = facetElements.get(currentFieldName);
-                        if (element == null) {
-                            element = aggregationElements.get(currentFieldName);
-                        }
+                        element = aggregationElements.get(currentFieldName);
                     }
 
                     if ("query".equals(currentFieldName)) {
@@ -439,9 +428,8 @@ public class PercolatorService extends AbstractComponent {
             }
 
             assert !shardResults.isEmpty();
-            InternalFacets reducedFacets = reduceFacets(shardResults);
             InternalAggregations reducedAggregations = reduceAggregations(shardResults);
-            return new ReduceResult(finalCount, reducedFacets, reducedAggregations);
+            return new ReduceResult(finalCount, reducedAggregations);
         }
 
         @Override
@@ -533,9 +521,8 @@ public class PercolatorService extends AbstractComponent {
             }
 
             assert !shardResults.isEmpty();
-            InternalFacets reducedFacets = reduceFacets(shardResults);
             InternalAggregations reducedAggregations = reduceAggregations(shardResults);
-            return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]), reducedFacets, reducedAggregations);
+            return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]), reducedAggregations);
         }
 
         @Override
@@ -732,9 +719,8 @@ public class PercolatorService extends AbstractComponent {
             }
 
             assert !shardResults.isEmpty();
-            InternalFacets reducedFacets = reduceFacets(shardResults);
             InternalAggregations reducedAggregations = reduceAggregations(shardResults);
-            return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]), reducedFacets, reducedAggregations);
+            return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]), reducedAggregations);
         }
 
         @Override
@@ -794,14 +780,11 @@ public class PercolatorService extends AbstractComponent {
         percolatorTypeFilter = context.indexService().cache().filter().cache(percolatorTypeFilter);
         XFilteredQuery query = new XFilteredQuery(context.percolateQuery(), percolatorTypeFilter);
         percolatorSearcher.searcher().search(query, percolateCollector);
-        for (Collector queryCollector : percolateCollector.facetAndAggregatorCollector) {
+        for (Collector queryCollector : percolateCollector.aggregatorCollector) {
             if (queryCollector instanceof XCollector) {
                 ((XCollector) queryCollector).postCollection();
             }
         }
-        if (context.facets() != null) {
-            facetPhase.execute(context);
-        }
         if (context.aggregations() != null) {
             aggregationPhase.execute(context);
         }
@@ -811,20 +794,17 @@ public class PercolatorService extends AbstractComponent {
 
         private final long count;
         private final PercolateResponse.Match[] matches;
-        private final InternalFacets reducedFacets;
         private final InternalAggregations reducedAggregations;
 
-        ReduceResult(long count, PercolateResponse.Match[] matches, InternalFacets reducedFacets, InternalAggregations reducedAggregations) {
+        ReduceResult(long count, PercolateResponse.Match[] matches, InternalAggregations reducedAggregations) {
             this.count = count;
             this.matches = matches;
-            this.reducedFacets = reducedFacets;
             this.reducedAggregations = reducedAggregations;
         }
 
-        public ReduceResult(long count, InternalFacets reducedFacets, InternalAggregations reducedAggregations) {
+        public ReduceResult(long count, InternalAggregations reducedAggregations) {
             this.count = count;
             this.matches = null;
-            this.reducedFacets = reducedFacets;
             this.reducedAggregations = reducedAggregations;
         }
 
@@ -836,45 +816,11 @@ public class PercolatorService extends AbstractComponent {
             return matches;
         }
 
-        public InternalFacets reducedFacets() {
-            return reducedFacets;
-        }
-
         public InternalAggregations reducedAggregations() {
             return reducedAggregations;
         }
     }
 
-    private InternalFacets reduceFacets(List<PercolateShardResponse> shardResults) {
-        if (shardResults.get(0).facets() == null) {
-            return null;
-        }
-
-        if (shardResults.size() == 1) {
-            return shardResults.get(0).facets();
-        }
-
-        PercolateShardResponse firstShardResponse = shardResults.get(0);
-        List<Facet> aggregatedFacets = Lists.newArrayList();
-        List<Facet> namedFacets = Lists.newArrayList();
-        for (Facet facet : firstShardResponse.facets()) {
-            // aggregate each facet name into a single list, and aggregate it
-            namedFacets.clear();
-            for (PercolateShardResponse entry : shardResults) {
-                for (Facet facet1 : entry.facets()) {
-                    if (facet.getName().equals(facet1.getName())) {
-                        namedFacets.add(facet1);
-                    }
-                }
-            }
-            if (!namedFacets.isEmpty()) {
-                Facet aggregatedFacet = ((InternalFacet) namedFacets.get(0)).reduce(new InternalFacet.ReduceContext(cacheRecycler, namedFacets));
-                aggregatedFacets.add(aggregatedFacet);
-            }
-        }
-        return new InternalFacets(aggregatedFacets);
-    }
-
     private InternalAggregations reduceAggregations(List<PercolateShardResponse> shardResults) {
         if (shardResults.get(0).aggregations() == null) {
             return null;

+ 7 - 26
src/main/java/org/elasticsearch/percolator/QueryCollector.java

@@ -26,7 +26,6 @@ import org.apache.lucene.search.*;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.search.FilteredCollector;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
 import org.elasticsearch.index.mapper.FieldMapper;
@@ -37,8 +36,6 @@ import org.elasticsearch.search.aggregations.AggregationPhase;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
-import org.elasticsearch.search.facet.SearchContextFacets;
-import org.elasticsearch.search.facet.nested.NestedFacetExecutor;
 import org.elasticsearch.search.highlight.HighlightField;
 import org.elasticsearch.search.highlight.HighlightPhase;
 
@@ -63,7 +60,7 @@ abstract class QueryCollector extends Collector {
 
     SortedBinaryDocValues values;
 
-    final List<Collector> facetAndAggregatorCollector;
+    final List<Collector> aggregatorCollector;
 
     QueryCollector(ESLogger logger, PercolateContext context, boolean isNestedDoc) {
         this.logger = logger;
@@ -73,23 +70,7 @@ abstract class QueryCollector extends Collector {
         this.idFieldData = context.fieldData().getForField(idMapper);
         this.isNestedDoc = isNestedDoc;
 
-        ImmutableList.Builder<Collector> facetAggCollectorBuilder = ImmutableList.builder();
-        if (context.facets() != null) {
-            for (SearchContextFacets.Entry entry : context.facets().entries()) {
-                if (entry.isGlobal()) {
-                    continue; // not supported for now
-                }
-                Collector collector = entry.getFacetExecutor().collector();
-                if (entry.getFilter() != null) {
-                    if (collector instanceof NestedFacetExecutor.Collector) {
-                        collector = new NestedFacetExecutor.Collector((NestedFacetExecutor.Collector) collector, entry.getFilter());
-                    } else {
-                        collector = new FilteredCollector(collector, entry.getFilter());
-                    }
-                }
-                facetAggCollectorBuilder.add(collector);
-            }
-        }
+        ImmutableList.Builder<Collector> aggCollectorBuilder = ImmutableList.builder();
 
         if (context.aggregations() != null) {
             AggregationContext aggregationContext = new AggregationContext(context);
@@ -107,22 +88,22 @@ abstract class QueryCollector extends Collector {
             }
             context.aggregations().aggregators(aggregators);
             if (!aggregatorCollectors.isEmpty()) {
-                facetAggCollectorBuilder.add(new AggregationPhase.AggregationsCollector(aggregatorCollectors, aggregationContext));
+                aggCollectorBuilder.add(new AggregationPhase.AggregationsCollector(aggregatorCollectors, aggregationContext));
             }
             aggregationContext.setNextReader(context.searcher().getIndexReader().getContext());
         }
-        facetAndAggregatorCollector = facetAggCollectorBuilder.build();
+        aggregatorCollector = aggCollectorBuilder.build();
     }
 
     public void postMatch(int doc) throws IOException {
-        for (Collector collector : facetAndAggregatorCollector) {
+        for (Collector collector : aggregatorCollector) {
             collector.collect(doc);
         }
     }
 
     @Override
     public void setScorer(Scorer scorer) throws IOException {
-        for (Collector collector : facetAndAggregatorCollector) {
+        for (Collector collector : aggregatorCollector) {
             collector.setScorer(scorer);
         }
     }
@@ -131,7 +112,7 @@ abstract class QueryCollector extends Collector {
     public void setNextReader(AtomicReaderContext context) throws IOException {
         // we use the UID because id might not be indexed
         values = idFieldData.load(context).getBytesValues();
-        for (Collector collector : facetAndAggregatorCollector) {
+        for (Collector collector : aggregatorCollector) {
             collector.setNextReader(context);
         }
     }

+ 2 - 8
src/main/java/org/elasticsearch/script/AbstractSearchScript.java

@@ -19,21 +19,15 @@
 
 package org.elasticsearch.script;
 
-import org.elasticsearch.search.lookup.IndexLookup;
-
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.Scorer;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
-import org.elasticsearch.search.lookup.DocLookup;
-import org.elasticsearch.search.lookup.FieldsLookup;
-import org.elasticsearch.search.lookup.SearchLookup;
-import org.elasticsearch.search.lookup.SourceLookup;
+import org.elasticsearch.search.lookup.*;
 
-import java.io.IOException;
 import java.util.Map;
 
 /**
- * A base class for any script type that is used during the search process (custom score, facets, and so on).
+ * A base class for any script type that is used during the search process (custom score, aggs, and so on).
  * <p/>
  * <p>If the script returns a specific numeric type, consider overriding the type specific base classes
  * such as {@link AbstractDoubleSearchScript}, {@link AbstractFloatSearchScript} and {@link AbstractLongSearchScript}

+ 1 - 2
src/main/java/org/elasticsearch/search/SearchModule.java

@@ -30,7 +30,6 @@ import org.elasticsearch.search.aggregations.AggregationModule;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificantTermsHeuristicModule;
 import org.elasticsearch.search.controller.SearchPhaseController;
 import org.elasticsearch.search.dfs.DfsPhase;
-import org.elasticsearch.search.facet.FacetModule;
 import org.elasticsearch.search.fetch.FetchPhase;
 import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
 import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
@@ -51,7 +50,7 @@ public class SearchModule extends AbstractModule implements SpawnModules {
 
     @Override
     public Iterable<? extends Module> spawnModules() {
-        return ImmutableList.of(new TransportSearchModule(), new FacetModule(), new HighlightModule(), new SuggestModule(), new FunctionScoreModule(), new AggregationModule(), new SignificantTermsHeuristicModule());
+        return ImmutableList.of(new TransportSearchModule(), new HighlightModule(), new SuggestModule(), new FunctionScoreModule(), new AggregationModule(), new SignificantTermsHeuristicModule());
     }
 
     @Override

+ 1 - 2
src/main/java/org/elasticsearch/search/TransportSearchModule.java

@@ -23,7 +23,6 @@ import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.inject.Module;
 import org.elasticsearch.common.inject.SpawnModules;
 import org.elasticsearch.search.aggregations.TransportAggregationModule;
-import org.elasticsearch.search.facet.TransportFacetModule;
 
 /**
  *
@@ -32,7 +31,7 @@ public class TransportSearchModule extends AbstractModule implements SpawnModule
 
     @Override
     public Iterable<? extends Module> spawnModules() {
-        return ImmutableList.of(new TransportFacetModule(), new TransportAggregationModule());
+        return ImmutableList.of(new TransportAggregationModule());
     }
 
     @Override

+ 2 - 2
src/main/java/org/elasticsearch/search/aggregations/AggregationBinaryParseElement.java

@@ -35,8 +35,8 @@ public class AggregationBinaryParseElement extends AggregationParseElement {
 
     @Override
     public void parse(XContentParser parser, SearchContext context) throws Exception {
-        byte[] facetSource = parser.binaryValue();
-        try (XContentParser aSourceParser = XContentFactory.xContent(facetSource).createParser(facetSource)) {
+        byte[] aggSource = parser.binaryValue();
+        try (XContentParser aSourceParser = XContentFactory.xContent(aggSource).createParser(aggSource)) {
             aSourceParser.nextToken(); // move past the first START_OBJECT
             super.parse(aSourceParser, context);
         }

+ 5 - 5
src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java

@@ -81,20 +81,20 @@ public abstract class AggregationBuilder<B extends AggregationBuilder<B>> extend
     /**
      * Sets a raw (xcontent / json) sub addAggregation.
      */
-    public B subAggregation(XContentBuilder facets) {
-        return subAggregation(facets.bytes());
+    public B subAggregation(XContentBuilder aggs) {
+        return subAggregation(aggs.bytes());
     }
 
     /**
      * Sets a raw (xcontent / json) sub addAggregation.
      */
-    public B subAggregation(Map<String, Object> facets) {
+    public B subAggregation(Map<String, Object> aggs) {
         try {
             XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE);
-            builder.map(facets);
+            builder.map(aggs);
             return subAggregation(builder);
         } catch (IOException e) {
-            throw new ElasticsearchGenerationException("Failed to generate [" + facets + "]", e);
+            throw new ElasticsearchGenerationException("Failed to generate [" + aggs + "]", e);
         }
     }
 

+ 1 - 1
src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java

@@ -100,7 +100,7 @@ public class AggregationPhase implements SearchPhase {
         }
 
         if (context.queryResult().aggregations() != null) {
-            // no need to compute the facets twice, they should be computed on a per context basis
+            // no need to compute the aggs twice, they should be computed on a per context basis
             return;
         }
 

+ 1 - 1
src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java

@@ -76,7 +76,7 @@ public class StringTerms extends InternalTerms {
 
         @Override
         public Number getKeyAsNumber() {
-            // this method is needed for scripted numeric faceting
+            // this method is needed for scripted numeric aggs
             return Double.parseDouble(termBytes.utf8ToString());
         }
 

+ 0 - 11
src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsContext.java

@@ -45,7 +45,6 @@ import org.elasticsearch.search.Scroll;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.aggregations.SearchContextAggregations;
 import org.elasticsearch.search.dfs.DfsSearchResult;
-import org.elasticsearch.search.facet.SearchContextFacets;
 import org.elasticsearch.search.fetch.FetchSearchResult;
 import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
 import org.elasticsearch.search.fetch.partial.PartialFieldsContext;
@@ -194,16 +193,6 @@ public class TopHitsContext extends SearchContext {
         throw new UnsupportedOperationException("Not supported");
     }
 
-    @Override
-    public SearchContextFacets facets() {
-        return context.facets();
-    }
-
-    @Override
-    public SearchContext facets(SearchContextFacets facets) {
-        throw new UnsupportedOperationException("Not supported");
-    }
-
     public SearchContextHighlight highlight() {
         return highlight;
     }

+ 7 - 81
src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java

@@ -38,7 +38,6 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.FilterBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
-import org.elasticsearch.search.facet.FacetBuilder;
 import org.elasticsearch.search.fetch.source.FetchSourceContext;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.search.internal.SearchContext;
@@ -107,9 +106,6 @@ public class SearchSourceBuilder implements ToXContent {
     private List<PartialField> partialFields;
     private FetchSourceContext fetchSourceContext;
 
-    private List<FacetBuilder> facets;
-    private BytesReference facetsBinary;
-
     private List<AbstractAggregationBuilder> aggregations;
     private BytesReference aggregationsBinary;
 
@@ -193,7 +189,7 @@ public class SearchSourceBuilder implements ToXContent {
 
     /**
      * Sets a filter that will be executed after the query has been executed and only has affect on the search hits
-     * (not aggregations or facets). This filter is always executed as last filtering mechanism.
+     * (not aggregations). This filter is always executed as last filtering mechanism.
      */
     public SearchSourceBuilder postFilter(FilterBuilder postFilter) {
         this.postFilterBuilder = postFilter;
@@ -202,7 +198,7 @@ public class SearchSourceBuilder implements ToXContent {
 
     /**
      * Sets a filter on the query executed that only applies to the search query
-     * (and not facets for example).
+     * (and not aggs for example).
      */
     public SearchSourceBuilder postFilter(String postFilterString) {
         return postFilter(postFilterString.getBytes(Charsets.UTF_8));
@@ -210,7 +206,7 @@ public class SearchSourceBuilder implements ToXContent {
 
     /**
      * Sets a filter on the query executed that only applies to the search query
-     * (and not facets for example).
+     * (and not aggs for example).
      */
     public SearchSourceBuilder postFilter(byte[] postFilter) {
         return postFilter(postFilter, 0, postFilter.length);
@@ -218,7 +214,7 @@ public class SearchSourceBuilder implements ToXContent {
 
     /**
      * Sets a filter on the query executed that only applies to the search query
-     * (and not facets for example).
+     * (and not aggs for example).
      */
     public SearchSourceBuilder postFilter(byte[] postFilterBinary, int postFilterBinaryOffset, int postFilterBinaryLength) {
         return postFilter(new BytesArray(postFilterBinary, postFilterBinaryOffset, postFilterBinaryLength));
@@ -226,7 +222,7 @@ public class SearchSourceBuilder implements ToXContent {
 
     /**
      * Sets a filter on the query executed that only applies to the search query
-     * (and not facets for example).
+     * (and not aggs for example).
      */
     public SearchSourceBuilder postFilter(BytesReference postFilterBinary) {
         this.filterBinary = postFilterBinary;
@@ -361,59 +357,6 @@ public class SearchSourceBuilder implements ToXContent {
         return this;
     }
 
-    /**
-     * Add a facet to perform as part of the search.
-     */
-    public SearchSourceBuilder facet(FacetBuilder facet) {
-        if (facets == null) {
-            facets = Lists.newArrayList();
-        }
-        facets.add(facet);
-        return this;
-    }
-
-    /**
-     * Sets a raw (xcontent / json) facets.
-     */
-    public SearchSourceBuilder facets(byte[] facetsBinary) {
-        return facets(facetsBinary, 0, facetsBinary.length);
-    }
-
-    /**
-     * Sets a raw (xcontent / json) facets.
-     */
-    public SearchSourceBuilder facets(byte[] facetsBinary, int facetBinaryOffset, int facetBinaryLength) {
-        return facets(new BytesArray(facetsBinary, facetBinaryOffset, facetBinaryLength));
-    }
-
-    /**
-     * Sets a raw (xcontent / json) facets.
-     */
-    public SearchSourceBuilder facets(BytesReference facetsBinary) {
-        this.facetsBinary = facetsBinary;
-        return this;
-    }
-
-    /**
-     * Sets a raw (xcontent / json) facets.
-     */
-    public SearchSourceBuilder facets(XContentBuilder facets) {
-        return facets(facets.bytes());
-    }
-
-    /**
-     * Sets a raw (xcontent / json) facets.
-     */
-    public SearchSourceBuilder facets(Map facets) {
-        try {
-            XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE);
-            builder.map(facets);
-            return facets(builder);
-        } catch (IOException e) {
-            throw new ElasticsearchGenerationException("Failed to generate [" + facets + "]", e);
-        }
-    }
-
     /**
      * Add an get to perform as part of the search.
      */
@@ -450,8 +393,8 @@ public class SearchSourceBuilder implements ToXContent {
     /**
      * Sets a raw (xcontent / json) addAggregation.
      */
-    public SearchSourceBuilder aggregations(XContentBuilder facets) {
-        return aggregations(facets.bytes());
+    public SearchSourceBuilder aggregations(XContentBuilder aggs) {
+        return aggregations(aggs.bytes());
     }
 
     /**
@@ -892,23 +835,6 @@ public class SearchSourceBuilder implements ToXContent {
             builder.endObject();
         }
 
-        if (facets != null) {
-            builder.field("facets");
-            builder.startObject();
-            for (FacetBuilder facet : facets) {
-                facet.toXContent(builder, params);
-            }
-            builder.endObject();
-        }
-
-        if (facetsBinary != null) {
-            if (XContentFactory.xContentType(facetsBinary) == builder.contentType()) {
-                builder.rawField("facets", facetsBinary);
-            } else {
-                builder.field("facets_binary", facetsBinary);
-            }
-        }
-
         if (aggregations != null) {
             builder.field("aggregations");
             builder.startObject();

+ 1 - 33
src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java

@@ -21,7 +21,6 @@ package org.elasticsearch.search.controller;
 
 import com.carrotsearch.hppc.IntArrayList;
 import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
-import com.google.common.collect.Lists;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.*;
 import org.elasticsearch.action.search.SearchRequest;
@@ -38,9 +37,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.dfs.AggregatedDfs;
 import org.elasticsearch.search.dfs.DfsSearchResult;
-import org.elasticsearch.search.facet.Facet;
-import org.elasticsearch.search.facet.InternalFacet;
-import org.elasticsearch.search.facet.InternalFacets;
 import org.elasticsearch.search.fetch.FetchSearchResult;
 import org.elasticsearch.search.fetch.FetchSearchResultProvider;
 import org.elasticsearch.search.internal.InternalSearchHit;
@@ -71,7 +67,6 @@ public class SearchPhaseController extends AbstractComponent {
 
     public static final ScoreDoc[] EMPTY_DOCS = new ScoreDoc[0];
 
-    private final CacheRecycler cacheRecycler;
     private final BigArrays bigArrays;
     private final boolean optimizeSingleShard;
 
@@ -80,7 +75,6 @@ public class SearchPhaseController extends AbstractComponent {
     @Inject
     public SearchPhaseController(Settings settings, CacheRecycler cacheRecycler, BigArrays bigArrays, ScriptService scriptService) {
         super(settings);
-        this.cacheRecycler = cacheRecycler;
         this.bigArrays = bigArrays;
         this.scriptService = scriptService;
         this.optimizeSingleShard = componentSettings.getAsBoolean("optimize_single_shard", true);
@@ -293,32 +287,6 @@ public class SearchPhaseController extends AbstractComponent {
             }
         }
 
-        // merge facets
-        InternalFacets facets = null;
-        if (!queryResults.isEmpty()) {
-            // we rely on the fact that the order of facets is the same on all query results
-            if (firstResult.facets() != null && firstResult.facets().facets() != null && !firstResult.facets().facets().isEmpty()) {
-                List<Facet> aggregatedFacets = Lists.newArrayList();
-                List<Facet> namedFacets = Lists.newArrayList();
-                for (Facet facet : firstResult.facets()) {
-                    // aggregate each facet name into a single list, and aggregate it
-                    namedFacets.clear();
-                    for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
-                        for (Facet facet1 : entry.value.queryResult().facets()) {
-                            if (facet.getName().equals(facet1.getName())) {
-                                namedFacets.add(facet1);
-                            }
-                        }
-                    }
-                    if (!namedFacets.isEmpty()) {
-                        Facet aggregatedFacet = ((InternalFacet) namedFacets.get(0)).reduce(new InternalFacet.ReduceContext(cacheRecycler, namedFacets));
-                        aggregatedFacets.add(aggregatedFacet);
-                    }
-                }
-                facets = new InternalFacets(aggregatedFacets);
-            }
-        }
-
         // count the total (we use the query result provider here, since we might not get any hits (we scrolled past them))
         long totalHits = 0;
         float maxScore = Float.NEGATIVE_INFINITY;
@@ -410,7 +378,7 @@ public class SearchPhaseController extends AbstractComponent {
 
         InternalSearchHits searchHits = new InternalSearchHits(hits.toArray(new InternalSearchHit[hits.size()]), totalHits, maxScore);
 
-        return new InternalSearchResponse(searchHits, facets, aggregations, suggest, timedOut, terminatedEarly);
+        return new InternalSearchResponse(searchHits, aggregations, suggest, timedOut, terminatedEarly);
     }
 
 }

+ 0 - 51
src/main/java/org/elasticsearch/search/facet/DoubleFacetAggregatorBase.java

@@ -1,51 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet;
-
-import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
-
-/**
- * Simple Facet aggregator base class for {@link DoubleValues}
- */
-public abstract class DoubleFacetAggregatorBase {
-    private int total;
-    private int missing;
-
-    public void onDoc(int docId, SortedNumericDoubleValues values) {
-        values.setDocument(docId);
-        int numValues = values.count();
-        int tempMissing = 1;
-        for (int i = 0; i < numValues; i++) {
-            tempMissing = 0;
-            onValue(docId, values.valueAt(i));
-            total++;
-        }
-        missing += tempMissing;
-    }
-
-    protected abstract void onValue(int docId, double next);
-
-    public final int total() {
-        return total;
-    }
-
-    public final int missing() {
-        return missing;
-    }
-}

+ 0 - 36
src/main/java/org/elasticsearch/search/facet/Facet.java

@@ -1,36 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet;
-
-/**
- * A search facet.
- */
-public interface Facet {
-
-    /**
-     * The "logical" name of the search facet.
-     */
-    String getName();
-
-    /**
-     * The type of the facet.
-     */
-    String getType();
-}

+ 0 - 44
src/main/java/org/elasticsearch/search/facet/FacetBinaryParseElement.java

@@ -1,44 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet;
-
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.internal.SearchContext;
-
-/**
- *
- */
-public class FacetBinaryParseElement extends FacetParseElement {
-
-    @Inject
-    public FacetBinaryParseElement(FacetParsers facetParsers) {
-        super(facetParsers);
-    }
-
-    @Override
-    public void parse(XContentParser parser, SearchContext context) throws Exception {
-        byte[] facetSource = parser.binaryValue();
-        try (XContentParser fSourceParser = XContentFactory.xContent(facetSource).createParser(facetSource)) {
-            fSourceParser.nextToken(); // move past the first START_OBJECT
-            super.parse(fSourceParser, context);
-        }
-    }
-}

+ 0 - 102
src/main/java/org/elasticsearch/search/facet/FacetBuilder.java

@@ -1,102 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet;
-
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.query.FilterBuilder;
-
-import java.io.IOException;
-
-/**
- *
- */
-public abstract class FacetBuilder implements ToXContent {
-
-    public static enum Mode {
-        COLLECTOR() {
-            @Override
-            public String toString() {
-                return "collector";
-            }
-        },
-        POST() {
-            @Override
-            public String toString() {
-                return "post";
-            }
-        };
-
-        public abstract String toString();
-    }
-
-    protected final String name;
-    protected FilterBuilder facetFilter;
-    protected Boolean global;
-    protected String nested;
-    protected Mode mode;
-
-    protected FacetBuilder(String name) {
-        this.name = name;
-    }
-
-    public FacetBuilder facetFilter(FilterBuilder filter) {
-        this.facetFilter = filter;
-        return this;
-    }
-
-    /**
-     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
-     * nested objects matching the path to be computed into the facet.
-     */
-    public FacetBuilder nested(String nested) {
-        this.nested = nested;
-        return this;
-    }
-
-    /**
-     * Marks the facet to run in a global scope, not bounded by any query.
-     */
-    public FacetBuilder global(boolean global) {
-        this.global = global;
-        return this;
-    }
-
-    public FacetBuilder mode(Mode mode) {
-        this.mode = mode;
-        return this;
-    }
-
-    protected void addFilterFacetAndGlobal(XContentBuilder builder, Params params) throws IOException {
-        if (facetFilter != null) {
-            builder.field("facet_filter");
-            facetFilter.toXContent(builder, params);
-        }
-        if (nested != null) {
-            builder.field("nested", nested);
-        }
-        if (global != null) {
-            builder.field("global", global);
-        }
-        if (mode != null) {
-            builder.field("mode", mode.toString());
-        }
-    }
-}

+ 0 - 98
src/main/java/org/elasticsearch/search/facet/FacetBuilders.java

@@ -1,98 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet;
-
-import org.elasticsearch.index.query.FilterBuilder;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.facet.datehistogram.DateHistogramFacetBuilder;
-import org.elasticsearch.search.facet.filter.FilterFacetBuilder;
-import org.elasticsearch.search.facet.geodistance.GeoDistanceFacetBuilder;
-import org.elasticsearch.search.facet.histogram.HistogramFacetBuilder;
-import org.elasticsearch.search.facet.histogram.HistogramScriptFacetBuilder;
-import org.elasticsearch.search.facet.query.QueryFacetBuilder;
-import org.elasticsearch.search.facet.range.RangeFacetBuilder;
-import org.elasticsearch.search.facet.range.RangeScriptFacetBuilder;
-import org.elasticsearch.search.facet.statistical.StatisticalFacetBuilder;
-import org.elasticsearch.search.facet.statistical.StatisticalScriptFacetBuilder;
-import org.elasticsearch.search.facet.terms.TermsFacetBuilder;
-import org.elasticsearch.search.facet.termsstats.TermsStatsFacetBuilder;
-
-/**
- * @deprecated Facets are deprecated and will be removed in a future release. Please use aggregations instead.
- */
-@Deprecated
-public class FacetBuilders {
-
-    public static QueryFacetBuilder queryFacet(String facetName) {
-        return new QueryFacetBuilder(facetName);
-    }
-
-    public static QueryFacetBuilder queryFacet(String facetName, QueryBuilder query) {
-        return new QueryFacetBuilder(facetName).query(query);
-    }
-
-    public static FilterFacetBuilder filterFacet(String facetName) {
-        return new FilterFacetBuilder(facetName);
-    }
-
-    public static FilterFacetBuilder filterFacet(String facetName, FilterBuilder filter) {
-        return new FilterFacetBuilder(facetName).filter(filter);
-    }
-
-    public static TermsFacetBuilder termsFacet(String facetName) {
-        return new TermsFacetBuilder(facetName);
-    }
-
-    public static TermsStatsFacetBuilder termsStatsFacet(String facetName) {
-        return new TermsStatsFacetBuilder(facetName);
-    }
-
-    public static StatisticalFacetBuilder statisticalFacet(String facetName) {
-        return new StatisticalFacetBuilder(facetName);
-    }
-
-    public static StatisticalScriptFacetBuilder statisticalScriptFacet(String facetName) {
-        return new StatisticalScriptFacetBuilder(facetName);
-    }
-
-    public static HistogramFacetBuilder histogramFacet(String facetName) {
-        return new HistogramFacetBuilder(facetName);
-    }
-
-    public static DateHistogramFacetBuilder dateHistogramFacet(String facetName) {
-        return new DateHistogramFacetBuilder(facetName);
-    }
-
-    public static HistogramScriptFacetBuilder histogramScriptFacet(String facetName) {
-        return new HistogramScriptFacetBuilder(facetName);
-    }
-
-    public static RangeFacetBuilder rangeFacet(String facetName) {
-        return new RangeFacetBuilder(facetName);
-    }
-
-    public static RangeScriptFacetBuilder rangeScriptFacet(String facetName) {
-        return new RangeScriptFacetBuilder(facetName);
-    }
-
-    public static GeoDistanceFacetBuilder geoDistanceFacet(String facetName) {
-        return new GeoDistanceFacetBuilder(facetName);
-    }
-}

+ 0 - 160
src/main/java/org/elasticsearch/search/facet/FacetExecutor.java

@@ -1,160 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet;
-
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.Scorer;
-import org.elasticsearch.common.lucene.docset.AndDocIdSet;
-import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
-import org.elasticsearch.common.lucene.search.XCollector;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * A facet processor ends up actually executing the relevant facet for a specific
- * search request.
- * <p/>
- * The facet executor requires at least the {@link #collector()} method to be implemented,
- * with an optional {@link #post()} implementation if specific optimizations can be done.
- */
-public abstract class FacetExecutor {
-
-    /**
-     * A post class extends this class to implement post hits processing.
-     */
-    public static abstract class Post {
-
-        public abstract void executePost(List<ContextDocIdSet> docSets) throws IOException;
-
-        /**
-         * A filtered post execution.
-         */
-        public static class Filtered extends Post {
-
-            private final Post post;
-            private final Filter filter;
-
-            public Filtered(Post post, Filter filter) {
-                this.post = post;
-                this.filter = filter;
-            }
-
-            @Override
-            public void executePost(List<ContextDocIdSet> docSets) throws IOException {
-                List<ContextDocIdSet> filteredEntries = new ArrayList<>(docSets.size());
-                for (int i = 0; i < docSets.size(); i++) {
-                    ContextDocIdSet entry = docSets.get(i);
-                    DocIdSet filteredSet = filter.getDocIdSet(entry.context, null);
-                    if (filteredSet != null) {
-                        filteredEntries.add(new ContextDocIdSet(
-                                entry.context,
-                                // TODO: can we be smart here, maybe AndDocIdSet is not always fastest?
-                                new AndDocIdSet(new DocIdSet[]{entry.docSet, filteredSet})
-                        ));
-                    }
-                }
-                post.executePost(filteredEntries);
-            }
-        }
-
-        /**
-         * A {@link FacetExecutor.Collector} based post.
-         */
-        public static class Collector extends Post {
-
-            private final FacetExecutor.Collector collector;
-
-            public Collector(FacetExecutor.Collector collector) {
-                this.collector = collector;
-            }
-
-            @Override
-            public void executePost(List<ContextDocIdSet> docSets) throws IOException {
-                for (int i = 0; i < docSets.size(); i++) {
-                    ContextDocIdSet docSet = docSets.get(i);
-                    collector.setNextReader(docSet.context);
-                    DocIdSetIterator it = docSet.docSet.iterator();
-                    int doc;
-                    while ((doc = it.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-                        collector.collect(doc);
-                    }
-                }
-                collector.postCollection();
-            }
-        }
-    }
-
-    /**
-     * Simple extension to {@link XCollector} that implements methods that are typically
-     * not needed when doing collector based faceting.
-     */
-    public static abstract class Collector extends XCollector {
-
-        @Override
-        public void setScorer(Scorer scorer) throws IOException {
-        }
-
-        @Override
-        public boolean acceptsDocsOutOfOrder() {
-            return true;
-        }
-
-        @Override
-        public abstract void postCollection() throws IOException;
-    }
-
-    /**
-     * The mode of the execution.
-     */
-    public static enum Mode {
-        /**
-         * Collector mode, maps to {@link #collector()}.
-         */
-        COLLECTOR,
-        /**
-         * Post mode, maps to {@link #post()}.
-         */
-        POST
-    }
-
-    /**
-     * Builds the facet.
-     */
-    public abstract InternalFacet buildFacet(String facetName);
-
-    /**
-     * A collector based facet implementation, collection the facet as hits match.
-     */
-    public abstract Collector collector();
-
-    /**
-     * A post based facet that executes the facet using the aggregated docs. By default
-     * uses the {@link Post.Collector} based implementation.
-     * <p/>
-     * Can be overridden if a more optimized non collector based implementation can be implemented.
-     */
-    public Post post() {
-        return new Post.Collector(collector());
-    }
-}

+ 0 - 69
src/main/java/org/elasticsearch/search/facet/FacetModule.java

@@ -1,69 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet;
-
-import com.google.common.collect.Lists;
-import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.inject.multibindings.Multibinder;
-import org.elasticsearch.search.facet.datehistogram.DateHistogramFacetParser;
-import org.elasticsearch.search.facet.filter.FilterFacetParser;
-import org.elasticsearch.search.facet.geodistance.GeoDistanceFacetParser;
-import org.elasticsearch.search.facet.histogram.HistogramFacetParser;
-import org.elasticsearch.search.facet.query.QueryFacetParser;
-import org.elasticsearch.search.facet.range.RangeFacetParser;
-import org.elasticsearch.search.facet.statistical.StatisticalFacetParser;
-import org.elasticsearch.search.facet.terms.TermsFacetParser;
-import org.elasticsearch.search.facet.termsstats.TermsStatsFacetParser;
-
-import java.util.List;
-
-/**
- *
- */
-public class FacetModule extends AbstractModule {
-
-    private List<Class<? extends FacetParser>> processors = Lists.newArrayList();
-
-    public FacetModule() {
-        processors.add(FilterFacetParser.class);
-        processors.add(QueryFacetParser.class);
-        processors.add(GeoDistanceFacetParser.class);
-        processors.add(HistogramFacetParser.class);
-        processors.add(DateHistogramFacetParser.class);
-        processors.add(RangeFacetParser.class);
-        processors.add(StatisticalFacetParser.class);
-        processors.add(TermsFacetParser.class);
-        processors.add(TermsStatsFacetParser.class);
-    }
-
-    public void addFacetProcessor(Class<? extends FacetParser> facetProcessor) {
-        processors.add(facetProcessor);
-    }
-
-    @Override
-    protected void configure() {
-        Multibinder<FacetParser> multibinder = Multibinder.newSetBinder(binder(), FacetParser.class);
-        for (Class<? extends FacetParser> processor : processors) {
-            multibinder.addBinding().to(processor);
-        }
-        bind(FacetParsers.class).asEagerSingleton();
-        bind(FacetParseElement.class).asEagerSingleton();
-        bind(FacetPhase.class).asEagerSingleton();
-    }
-}

+ 0 - 142
src/main/java/org/elasticsearch/search/facet/FacetParseElement.java

@@ -1,142 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet;
-
-import org.apache.lucene.search.Filter;
-import org.elasticsearch.ElasticsearchIllegalArgumentException;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.ParsedFilter;
-import org.elasticsearch.search.SearchParseElement;
-import org.elasticsearch.search.SearchParseException;
-import org.elasticsearch.search.facet.nested.NestedFacetExecutor;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * <pre>
- * facets : {
- *  facet1: {
- *      query : { ... },
- *      global : false
- *  },
- *  facet2: {
- *      terms : {
- *          name : "myfield",
- *          size : 12
- *      },
- *      global : false
- *  }
- * }
- * </pre>
- */
-public class FacetParseElement implements SearchParseElement {
-
-    private final FacetParsers facetParsers;
-
-    @Inject
-    public FacetParseElement(FacetParsers facetParsers) {
-        this.facetParsers = facetParsers;
-    }
-
-    @Override
-    public void parse(XContentParser parser, SearchContext context) throws Exception {
-        XContentParser.Token token;
-
-        List<SearchContextFacets.Entry> entries = new ArrayList<>();
-
-        String facetName = null;
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token == XContentParser.Token.FIELD_NAME) {
-                facetName = parser.currentName();
-            } else if (token == XContentParser.Token.START_OBJECT) {
-                FacetExecutor facetExecutor = null;
-                boolean global = false;
-                FacetExecutor.Mode defaultMainMode = null;
-                FacetExecutor.Mode defaultGlobalMode = null;
-                FacetExecutor.Mode mode = null;
-                Filter filter = null;
-                boolean cacheFilter = false;
-                String nestedPath = null;
-
-                String fieldName = null;
-                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                    if (token == XContentParser.Token.FIELD_NAME) {
-                        fieldName = parser.currentName();
-                    } else if (token == XContentParser.Token.START_OBJECT) {
-                        if ("facet_filter".equals(fieldName) || "facetFilter".equals(fieldName)) {
-                            ParsedFilter parsedFilter = context.queryParserService().parseInnerFilter(parser);
-                            filter = parsedFilter == null ? null : parsedFilter.filter();
-                        } else {
-                            FacetParser facetParser = facetParsers.parser(fieldName);
-                            if (facetParser == null) {
-                                throw new SearchParseException(context, "No facet type found for [" + fieldName + "]");
-                            }
-                            facetExecutor = facetParser.parse(facetName, parser, context);
-                            defaultMainMode = facetParser.defaultMainMode();
-                            defaultGlobalMode = facetParser.defaultGlobalMode();
-                        }
-                    } else if (token.isValue()) {
-                        if ("global".equals(fieldName)) {
-                            global = parser.booleanValue();
-                        } else if ("mode".equals(fieldName)) {
-                            String modeAsText = parser.text();
-                            if ("collector".equals(modeAsText)) {
-                                mode = FacetExecutor.Mode.COLLECTOR;
-                            } else if ("post".equals(modeAsText)) {
-                                mode = FacetExecutor.Mode.POST;
-                            } else {
-                                throw new ElasticsearchIllegalArgumentException("failed to parse facet mode [" + modeAsText + "]");
-                            }
-                        } else if ("scope".equals(fieldName) || "_scope".equals(fieldName)) {
-                            throw new SearchParseException(context, "the [scope] support in facets have been removed");
-                        } else if ("cache_filter".equals(fieldName) || "cacheFilter".equals(fieldName)) {
-                            cacheFilter = parser.booleanValue();
-                        } else if ("nested".equals(fieldName)) {
-                            nestedPath = parser.text();
-                        }
-                    }
-                }
-
-                if (filter != null) {
-                    if (cacheFilter) {
-                        filter = context.filterCache().cache(filter);
-                    }
-                }
-
-                if (facetExecutor == null) {
-                    throw new SearchParseException(context, "no facet type found for facet named [" + facetName + "]");
-                }
-
-                if (nestedPath != null) {
-                    facetExecutor = new NestedFacetExecutor(facetExecutor, context, nestedPath);
-                }
-
-                if (mode == null) {
-                    mode = global ? defaultGlobalMode : defaultMainMode;
-                }
-                entries.add(new SearchContextFacets.Entry(facetName, mode, facetExecutor, global, filter));
-            }
-        }
-
-        context.facets(new SearchContextFacets(entries));
-    }
-}

+ 0 - 52
src/main/java/org/elasticsearch/search/facet/FacetParser.java

@@ -1,52 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet;
-
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.io.IOException;
-
-/**
- * A facet parser parses the relevant matching "type" of facet into a {@link FacetExecutor}.
- * <p/>
- * The parser also suggest the default {@link FacetExecutor.Mode} both for global and main executions.
- */
-public interface FacetParser {
-
-    /**
-     * The type of the facet, for example, terms.
-     */
-    String[] types();
-
-    /**
-     * The default mode to use when executed as a "main" (query level) facet.
-     */
-    FacetExecutor.Mode defaultMainMode();
-
-    /**
-     * The default mode to use when executed as a "global" (all docs) facet.
-     */
-    FacetExecutor.Mode defaultGlobalMode();
-
-    /**
-     * Parses the facet into a {@link FacetExecutor}.
-     */
-    FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException;
-}

+ 0 - 48
src/main/java/org/elasticsearch/search/facet/FacetParsers.java

@@ -1,48 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet;
-
-import com.google.common.collect.ImmutableMap;
-import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.inject.Inject;
-
-import java.util.Set;
-
-/**
- *
- */
-public class FacetParsers {
-
-    private final ImmutableMap<String, FacetParser> parsers;
-
-    @Inject
-    public FacetParsers(Set<FacetParser> parsers) {
-        MapBuilder<String, FacetParser> builder = MapBuilder.newMapBuilder();
-        for (FacetParser parser : parsers) {
-            for (String type : parser.types()) {
-                builder.put(type, parser);
-            }
-        }
-        this.parsers = builder.immutableMap();
-    }
-
-    public FacetParser parser(String type) {
-        return parsers.get(type);
-    }
-}

+ 0 - 204
src/main/java/org/elasticsearch/search/facet/FacetPhase.java

@@ -1,204 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.search.*;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.ElasticsearchIllegalStateException;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.lucene.docset.AllDocIdSet;
-import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
-import org.elasticsearch.common.lucene.search.*;
-import org.elasticsearch.search.SearchParseElement;
-import org.elasticsearch.search.SearchPhase;
-import org.elasticsearch.search.facet.nested.NestedFacetExecutor;
-import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.query.QueryPhaseExecutionException;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-/**
- *
- */
-public class FacetPhase implements SearchPhase {
-
-    private final FacetParseElement facetParseElement;
-
-    private final FacetBinaryParseElement facetBinaryParseElement;
-
-    @Inject
-    public FacetPhase(FacetParseElement facetParseElement, FacetBinaryParseElement facetBinaryParseElement) {
-        this.facetParseElement = facetParseElement;
-        this.facetBinaryParseElement = facetBinaryParseElement;
-    }
-
-    @Override
-    public Map<String, ? extends SearchParseElement> parseElements() {
-        return ImmutableMap.of("facets", facetParseElement, "facets_binary", facetBinaryParseElement, "facetsBinary", facetBinaryParseElement);
-    }
-
-    @Override
-    public void preProcess(SearchContext context) {
-        if (context.facets() != null && context.facets().hasQuery()) {
-            for (SearchContextFacets.Entry entry : context.facets().entries()) {
-                if (entry.isGlobal()) {
-                    continue;
-                }
-                if (entry.getMode() == FacetExecutor.Mode.COLLECTOR) {
-                    // TODO: We can pass the filter as param to collector method, then this filter wrapper logic can
-                    // be moved to NestedFacetExecutor impl, the other implementations would just wrap it into
-                    // FilteredCollector.
-                    Collector collector = entry.getFacetExecutor().collector();
-
-                    if (entry.getFilter() != null) {
-                        if (collector instanceof NestedFacetExecutor.Collector) {
-                            // We get rootDoc ids as hits in the collect method, so we need to first translate from
-                            // rootDoc hit to nested doc hit and then apply filter.
-                            collector = new NestedFacetExecutor.Collector((NestedFacetExecutor.Collector) collector, entry.getFilter());
-                            // If we would first apply the filter on the rootDoc level and then translate it back to the
-                            // nested docs we ignore the facet filter and all nested docs are passed to facet collector
-                        } else {
-                            collector = new FilteredCollector(collector, entry.getFilter());
-                        }
-                    }
-                    context.searcher().addMainQueryCollector(collector);
-                } else if (entry.getMode() == FacetExecutor.Mode.POST) {
-                    context.searcher().enableMainDocIdSetCollector();
-                } else {
-                    throw new ElasticsearchIllegalStateException("what mode?");
-                }
-            }
-        }
-    }
-
-    @Override
-    public void execute(SearchContext context) throws ElasticsearchException {
-        if (context.facets() == null) {
-            return;
-        }
-
-        if (context.queryResult().facets() != null) {
-            // no need to compute the facets twice, they should be computed on a per context basis
-            return;
-        }
-
-        Map<Filter, List<Collector>> filtersByCollector = null;
-        List<ContextDocIdSet> globalDocSets = null;
-        for (SearchContextFacets.Entry entry : context.facets().entries()) {
-            if (!entry.isGlobal()) {
-                if (entry.getMode() == FacetExecutor.Mode.POST) {
-                    FacetExecutor.Post post = entry.getFacetExecutor().post();
-                    if (entry.getFilter() != null) {
-                        if (post instanceof NestedFacetExecutor.Post) {
-                            post = new NestedFacetExecutor.Post((NestedFacetExecutor.Post) post, entry.getFilter());
-                        } else {
-                            post = new FacetExecutor.Post.Filtered(post, entry.getFilter());
-                        }
-                    }
-                    try {
-                        post.executePost(context.searcher().mainDocIdSetCollector().docSets());
-                    } catch (Exception e) {
-                        throw new QueryPhaseExecutionException(context, "failed to execute facet [" + entry.getFacetName() + "]", e);
-                    }
-                }
-            } else {
-                if (entry.getMode() == FacetExecutor.Mode.POST) {
-                    if (globalDocSets == null) {
-                        // build global post entries, map a reader context to a live docs docIdSet
-                        List<AtomicReaderContext> leaves = context.searcher().getIndexReader().leaves();
-                        globalDocSets = new ArrayList<>(leaves.size());
-                        for (AtomicReaderContext leaf : leaves) {
-                            globalDocSets.add(new ContextDocIdSet(
-                                    leaf,
-                                    BitsFilteredDocIdSet.wrap(new AllDocIdSet(leaf.reader().maxDoc()), leaf.reader().getLiveDocs())) // need to only include live docs
-                            );
-                        }
-                    }
-                    try {
-                        FacetExecutor.Post post = entry.getFacetExecutor().post();
-                        if (entry.getFilter() != null) {
-                            if (post instanceof NestedFacetExecutor.Post) {
-                                post = new NestedFacetExecutor.Post((NestedFacetExecutor.Post) post, entry.getFilter());
-                            } else {
-                                post = new FacetExecutor.Post.Filtered(post, entry.getFilter());
-                            }
-                        }
-                        post.executePost(globalDocSets);
-                    } catch (Exception e) {
-                        throw new QueryPhaseExecutionException(context, "Failed to execute facet [" + entry.getFacetName() + "]", e);
-                    }
-                } else if (entry.getMode() == FacetExecutor.Mode.COLLECTOR) {
-                    Filter filter = Queries.MATCH_ALL_FILTER;
-                    Collector collector = entry.getFacetExecutor().collector();
-                    if (entry.getFilter() != null) {
-                        if (collector instanceof NestedFacetExecutor.Collector) {
-                            collector = new NestedFacetExecutor.Collector((NestedFacetExecutor.Collector) collector, entry.getFilter());
-                        } else {
-                            collector = new FilteredCollector(collector, entry.getFilter());
-                        }
-                    }
-                    if (filtersByCollector == null) {
-                        filtersByCollector = Maps.newHashMap();
-                    }
-                    List<Collector> list = filtersByCollector.get(filter);
-                    if (list == null) {
-                        list = new ArrayList<>();
-                        filtersByCollector.put(filter, list);
-                    }
-                    list.add(collector);
-                }
-            }
-        }
-
-        // optimize the global collector based execution
-        if (filtersByCollector != null) {
-            // now, go and execute the filters->collector ones
-            for (Map.Entry<Filter, List<Collector>> entry : filtersByCollector.entrySet()) {
-                Filter filter = entry.getKey();
-                Query query = new XConstantScoreQuery(filter);
-                Filter searchFilter = context.searchFilter(context.types());
-                if (searchFilter != null) {
-                    query = new XFilteredQuery(query, searchFilter);
-                }
-                try {
-                    context.searcher().search(query, MultiCollector.wrap(entry.getValue().toArray(new Collector[entry.getValue().size()])));
-                    for (Collector collector : entry.getValue()) {
-                        if (collector instanceof XCollector) {
-                            ((XCollector) collector).postCollection();
-                        }
-                    }
-                } catch (Exception e) {
-                    throw new QueryPhaseExecutionException(context, "Failed to execute global facets", e);
-                }
-            }
-        }
-
-        List<Facet> facets = new ArrayList<>(context.facets().entries().size());
-        for (SearchContextFacets.Entry entry : context.facets().entries()) {
-            facets.add(entry.getFacetExecutor().buildFacet(entry.getFacetName()));
-        }
-        context.queryResult().facets(new InternalFacets(facets));
-    }
-}

+ 0 - 36
src/main/java/org/elasticsearch/search/facet/FacetPhaseExecutionException.java

@@ -1,36 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet;
-
-import org.elasticsearch.ElasticsearchException;
-
-/**
- *
- */
-public class FacetPhaseExecutionException extends ElasticsearchException {
-
-    public FacetPhaseExecutionException(String facetName, String msg) {
-        super("Facet [" + facetName + "]: " + msg);
-    }
-
-    public FacetPhaseExecutionException(String facetName, String msg, Throwable t) {
-        super("Facet [" + facetName + "]: " + msg, t);
-    }
-}

+ 0 - 56
src/main/java/org/elasticsearch/search/facet/Facets.java

@@ -1,56 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Facets of search action.
- *
- *
- */
-public interface Facets extends Iterable<Facet> {
-
-    /**
-     * The list of {@link Facet}s.
-     */
-    List<Facet> facets();
-
-    /**
-     * Returns the {@link Facet}s keyed by facet name.
-     */
-    Map<String, Facet> getFacets();
-
-    /**
-     * Returns the {@link Facet}s keyed by facet name.
-     */
-    Map<String, Facet> facetsAsMap();
-
-    /**
-     * Returns the facet by name already casted to the specified type.
-     */
-    <T extends Facet> T facet(Class<T> facetType, String name);
-
-    /**
-     * A facet of the specified name.
-     */
-    <T extends Facet> T facet(String name);
-}

+ 0 - 109
src/main/java/org/elasticsearch/search/facet/InternalFacet.java

@@ -1,109 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet;
-
-import com.google.common.collect.ImmutableMap;
-import org.elasticsearch.cache.recycler.CacheRecycler;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
-import org.elasticsearch.common.xcontent.ToXContent;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- *
- */
-public abstract class InternalFacet implements Facet, Streamable, ToXContent {
-
-    public static class ReduceContext {
-        private final CacheRecycler cacheRecycler;
-        private final List<Facet> facets;
-
-        public ReduceContext(CacheRecycler cacheRecycler, List<Facet> facets) {
-            this.cacheRecycler = cacheRecycler;
-            this.facets = facets;
-        }
-
-        public CacheRecycler cacheRecycler() {
-            return cacheRecycler;
-        }
-
-        public List<Facet> facets() {
-            return facets;
-        }
-    }
-
-    private String facetName;
-
-    /**
-     * Here just for streams...
-     */
-    protected InternalFacet() {
-
-    }
-
-    protected InternalFacet(String facetName) {
-        this.facetName = facetName;
-    }
-
-    public abstract BytesReference streamType();
-
-    public abstract Facet reduce(ReduceContext context);
-
-    public static interface Stream {
-        Facet readFacet(StreamInput in) throws IOException;
-    }
-
-    public static class Streams {
-
-        private static ImmutableMap<BytesReference, Stream> streams = ImmutableMap.of();
-
-        public static synchronized void registerStream(Stream stream, BytesReference... types) {
-            MapBuilder<BytesReference, Stream> uStreams = MapBuilder.newMapBuilder(streams);
-            for (BytesReference type : types) {
-                uStreams.put(type, stream);
-            }
-            streams = uStreams.immutableMap();
-        }
-
-        public static Stream stream(BytesReference type) {
-            return streams.get(type);
-        }
-    }
-
-    @Override
-    public final String getName() {
-        return facetName;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        facetName = in.readString();
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(facetName);
-    }
-}

+ 0 - 164
src/main/java/org/elasticsearch/search/facet/InternalFacets.java

@@ -1,164 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import static com.google.common.collect.Maps.newHashMap;
-
-/**
- *
- */
-public class InternalFacets implements Facets, Streamable, ToXContent, Iterable<Facet> {
-
-    private List<Facet> facets = ImmutableList.of();
-
-    private Map<String, Facet> facetsAsMap;
-
-    private InternalFacets() {
-
-    }
-
-    /**
-     * Constructs a new facets.
-     */
-    public InternalFacets(List<Facet> facets) {
-        this.facets = facets;
-    }
-
-    /**
-     * Iterates over the {@link Facet}s.
-     */
-    @Override
-    public Iterator<Facet> iterator() {
-        return facets.iterator();
-    }
-
-    /**
-     * The list of {@link Facet}s.
-     */
-    public List<Facet> facets() {
-        return facets;
-    }
-
-    /**
-     * Returns the {@link Facet}s keyed by map.
-     */
-    public Map<String, Facet> getFacets() {
-        return facetsAsMap();
-    }
-
-    /**
-     * Returns the {@link Facet}s keyed by map.
-     */
-    public Map<String, Facet> facetsAsMap() {
-        if (facetsAsMap != null) {
-            return facetsAsMap;
-        }
-        Map<String, Facet> facetsAsMap = newHashMap();
-        for (Facet facet : facets) {
-            facetsAsMap.put(facet.getName(), facet);
-        }
-        this.facetsAsMap = facetsAsMap;
-        return facetsAsMap;
-    }
-
-    /**
-     * Returns the facet by name already casted to the specified type.
-     */
-    @Override
-    public <T extends Facet> T facet(Class<T> facetType, String name) {
-        return facetType.cast(facet(name));
-    }
-
-    /**
-     * A facet of the specified name.
-     */
-    @SuppressWarnings({"unchecked"})
-    @Override
-    public <T extends Facet> T facet(String name) {
-        return (T) facetsAsMap().get(name);
-    }
-
-    static final class Fields {
-        static final XContentBuilderString FACETS = new XContentBuilderString("facets");
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject(Fields.FACETS);
-        for (Facet facet : facets) {
-            ((InternalFacet) facet).toXContent(builder, params);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    public static InternalFacets readFacets(StreamInput in) throws IOException {
-        InternalFacets result = new InternalFacets();
-        result.readFrom(in);
-        return result;
-    }
-
-    public static InternalFacets readOptionalFacets(StreamInput in) throws IOException {
-        return in.readOptionalStreamable(new InternalFacets());
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        int size = in.readVInt();
-        if (size == 0) {
-            facets = ImmutableList.of();
-            facetsAsMap = ImmutableMap.of();
-        } else {
-            facets = Lists.newArrayListWithCapacity(size);
-            for (int i = 0; i < size; i++) {
-                BytesReference type = in.readBytesReference();
-                Facet facet = InternalFacet.Streams.stream(type).readFacet(in);
-                facets.add(facet);
-            }
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeVInt(facets.size());
-        for (Facet facet : facets) {
-            InternalFacet internalFacet = (InternalFacet) facet;
-            out.writeBytesReference(internalFacet.streamType());
-            internalFacet.writeTo(out);
-        }
-    }
-}
-

+ 0 - 52
src/main/java/org/elasticsearch/search/facet/LongFacetAggregatorBase.java

@@ -1,52 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet;
-
-import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.util.LongValues;
-
-/**
- * Simple Facet aggregator base class for {@link LongValues}
- */
-public abstract class LongFacetAggregatorBase {
-    private int total;
-    private int missing;
-
-    public void onDoc(int docId, SortedNumericDocValues values) {
-        values.setDocument(docId);
-        final int numValues = values.count();
-        int tempMissing = 1;
-        for (int i = 0; i < numValues; i++) {
-            tempMissing = 0;
-            onValue(docId, values.valueAt(i));
-            total++;
-        }
-        missing += tempMissing;
-    }
-
-    protected abstract void onValue(int docId, long next);
-
-    public final int total() {
-        return total;
-    }
-
-    public final int missing() {
-        return missing;
-    }
-}

+ 0 - 102
src/main/java/org/elasticsearch/search/facet/SearchContextFacets.java

@@ -1,102 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet;
-
-import org.apache.lucene.search.Filter;
-import org.elasticsearch.common.Nullable;
-
-import java.util.List;
-
-/**
- *
- */
-public class SearchContextFacets {
-
-    public static class Entry {
-        private final String facetName;
-        private final FacetExecutor.Mode mode;
-        private final FacetExecutor facetExecutor;
-        private final boolean global;
-        @Nullable
-        private final Filter filter;
-
-        public Entry(String facetName, FacetExecutor.Mode mode, FacetExecutor facetExecutor, boolean global, @Nullable Filter filter) {
-            this.facetName = facetName;
-            this.mode = mode;
-            this.facetExecutor = facetExecutor;
-            this.global = global;
-            this.filter = filter;
-        }
-
-        public String getFacetName() {
-            return facetName;
-        }
-
-        public FacetExecutor.Mode getMode() {
-            return mode;
-        }
-
-        public FacetExecutor getFacetExecutor() {
-            return facetExecutor;
-        }
-
-        public boolean isGlobal() {
-            return global;
-        }
-
-        public Filter getFilter() {
-            return filter;
-        }
-    }
-
-    private final List<Entry> entries;
-
-    private boolean hasQuery;
-    private boolean hasGlobal;
-
-    public SearchContextFacets(List<Entry> entries) {
-        this.entries = entries;
-        for (Entry entry : entries) {
-            if (entry.global) {
-                hasGlobal = true;
-            } else {
-                hasQuery = true;
-            }
-        }
-    }
-
-    public List<Entry> entries() {
-        return this.entries;
-    }
-
-    /**
-     * Are there facets that need to be computed on the query hits?
-     */
-    public boolean hasQuery() {
-        return hasQuery;
-    }
-
-    /**
-     * Are there global facets that need to be computed on all the docs.
-     */
-    public boolean hasGlobal() {
-        return hasGlobal;
-    }
-}

+ 0 - 49
src/main/java/org/elasticsearch/search/facet/TransportFacetModule.java

@@ -1,49 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet;
-
-import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.search.facet.datehistogram.InternalDateHistogramFacet;
-import org.elasticsearch.search.facet.filter.InternalFilterFacet;
-import org.elasticsearch.search.facet.geodistance.InternalGeoDistanceFacet;
-import org.elasticsearch.search.facet.histogram.InternalHistogramFacet;
-import org.elasticsearch.search.facet.query.InternalQueryFacet;
-import org.elasticsearch.search.facet.range.InternalRangeFacet;
-import org.elasticsearch.search.facet.statistical.InternalStatisticalFacet;
-import org.elasticsearch.search.facet.terms.InternalTermsFacet;
-import org.elasticsearch.search.facet.termsstats.InternalTermsStatsFacet;
-
-/**
- *
- */
-public class TransportFacetModule extends AbstractModule {
-
-    @Override
-    protected void configure() {
-        InternalFilterFacet.registerStreams();
-        InternalQueryFacet.registerStreams();
-        InternalGeoDistanceFacet.registerStreams();
-        InternalHistogramFacet.registerStreams();
-        InternalDateHistogramFacet.registerStreams();
-        InternalRangeFacet.registerStreams();
-        InternalStatisticalFacet.registerStreams();
-        InternalTermsFacet.registerStreams();
-        InternalTermsStatsFacet.registerStreams();
-    }
-}

+ 0 - 120
src/main/java/org/elasticsearch/search/facet/datehistogram/CountDateHistogramFacetExecutor.java

@@ -1,120 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.datehistogram;
-
-import com.carrotsearch.hppc.LongLongOpenHashMap;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.index.SortedNumericDocValues;
-import org.elasticsearch.cache.recycler.CacheRecycler;
-import org.elasticsearch.common.recycler.Recycler;
-import org.elasticsearch.common.rounding.Rounding;
-import org.elasticsearch.index.fielddata.IndexNumericFieldData;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.InternalFacet;
-import org.elasticsearch.search.facet.LongFacetAggregatorBase;
-
-import java.io.IOException;
-
-/**
- * A date histogram facet collector that uses the same field as the key as well as the
- * value.
- */
-public class CountDateHistogramFacetExecutor extends FacetExecutor {
-
-    private final Rounding tzRounding;
-    private final IndexNumericFieldData indexFieldData;
-    final DateHistogramFacet.ComparatorType comparatorType;
-
-    final Recycler.V<LongLongOpenHashMap> counts;
-
-    public CountDateHistogramFacetExecutor(IndexNumericFieldData indexFieldData, Rounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, CacheRecycler cacheRecycler) {
-        this.comparatorType = comparatorType;
-        this.indexFieldData = indexFieldData;
-        this.tzRounding = tzRounding;
-
-        this.counts = cacheRecycler.longLongMap(-1);
-    }
-
-    @Override
-    public Collector collector() {
-        return new Collector();
-    }
-
-    @Override
-    public InternalFacet buildFacet(String facetName) {
-        InternalCountDateHistogramFacet.CountEntry[] countEntries = new InternalCountDateHistogramFacet.CountEntry[counts.v().size()];
-        final boolean[] states = counts.v().allocated;
-        final long[] keys = counts.v().keys;
-        final long[] values = counts.v().values;
-
-        int entryIndex = 0;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
-                countEntries[entryIndex++] = new InternalCountDateHistogramFacet.CountEntry(keys[i], values[i]);
-            }
-        }
-        counts.close();
-        return new InternalCountDateHistogramFacet(facetName, comparatorType, countEntries);
-    }
-
-    class Collector extends FacetExecutor.Collector {
-
-        private SortedNumericDocValues values;
-        private final DateHistogramProc histoProc;
-
-        public Collector() {
-            this.histoProc = new DateHistogramProc(counts.v(), tzRounding);
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            values = indexFieldData.load(context).getLongValues();
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-            histoProc.onDoc(doc, values);
-        }
-
-        @Override
-        public void postCollection() {
-        }
-    }
-
-    public static class DateHistogramProc extends LongFacetAggregatorBase {
-
-        private final LongLongOpenHashMap counts;
-        private final Rounding tzRounding;
-
-        public DateHistogramProc(LongLongOpenHashMap counts, Rounding tzRounding) {
-            this.counts = counts;
-            this.tzRounding = tzRounding;
-        }
-
-        @Override
-        public void onValue(int docId, long value) {
-            counts.addTo(tzRounding.round(value), 1);
-        }
-
-        public LongLongOpenHashMap counts() {
-            return counts;
-        }
-    }
-}

+ 0 - 180
src/main/java/org/elasticsearch/search/facet/datehistogram/DateHistogramFacet.java

@@ -1,180 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.datehistogram;
-
-import org.elasticsearch.ElasticsearchIllegalArgumentException;
-import org.elasticsearch.search.facet.Facet;
-
-import java.util.Comparator;
-import java.util.List;
-
-/**
- * A date histogram facet.
- */
-public interface DateHistogramFacet extends Facet, Iterable<DateHistogramFacet.Entry> {
-
-    /**
-     * The type of the filter facet.
-     */
-    public static final String TYPE = "date_histogram";
-
-    /**
-     * An ordered list of histogram facet entries.
-     */
-    List<? extends Entry> getEntries();
-
-    public static enum ComparatorType {
-        TIME((byte) 0, "time", new Comparator<Entry>() {
-
-            @Override
-            public int compare(Entry o1, Entry o2) {
-                // push nulls to the end
-                if (o1 == null) {
-                    if (o2 == null) {
-                        return 0;
-                    }
-                    return 1;
-                }
-                if (o2 == null) {
-                    return -1;
-                }
-                return (o1.getTime() < o2.getTime() ? -1 : (o1.getTime() == o2.getTime() ? 0 : 1));
-            }
-        }),
-        COUNT((byte) 1, "count", new Comparator<Entry>() {
-
-            @Override
-            public int compare(Entry o1, Entry o2) {
-                // push nulls to the end
-                if (o1 == null) {
-                    if (o2 == null) {
-                        return 0;
-                    }
-                    return 1;
-                }
-                if (o2 == null) {
-                    return -1;
-                }
-                return (o1.getCount() < o2.getCount() ? -1 : (o1.getCount() == o2.getCount() ? 0 : 1));
-            }
-        }),
-        TOTAL((byte) 2, "total", new Comparator<Entry>() {
-
-            @Override
-            public int compare(Entry o1, Entry o2) {
-                // push nulls to the end
-                if (o1 == null) {
-                    if (o2 == null) {
-                        return 0;
-                    }
-                    return 1;
-                }
-                if (o2 == null) {
-                    return -1;
-                }
-                return (o1.getTotal() < o2.getTotal() ? -1 : (o1.getTotal() == o2.getTotal() ? 0 : 1));
-            }
-        });
-
-        private final byte id;
-
-        private final String description;
-
-        private final Comparator<Entry> comparator;
-
-        ComparatorType(byte id, String description, Comparator<Entry> comparator) {
-            this.id = id;
-            this.description = description;
-            this.comparator = comparator;
-        }
-
-        public byte id() {
-            return this.id;
-        }
-
-        public String description() {
-            return this.description;
-        }
-
-        public Comparator<Entry> comparator() {
-            return comparator;
-        }
-
-        public static ComparatorType fromId(byte id) {
-            if (id == 0) {
-                return TIME;
-            } else if (id == 1) {
-                return COUNT;
-            } else if (id == 2) {
-                return TOTAL;
-            }
-            throw new ElasticsearchIllegalArgumentException("No type argument match for histogram comparator [" + id + "]");
-        }
-
-        public static ComparatorType fromString(String type) {
-            if ("time".equals(type)) {
-                return TIME;
-            } else if ("count".equals(type)) {
-                return COUNT;
-            } else if ("total".equals(type)) {
-                return TOTAL;
-            }
-            throw new ElasticsearchIllegalArgumentException("No type argument match for histogram comparator [" + type + "]");
-        }
-    }
-
-    public interface Entry {
-
-        /**
-         * The time bucket start (in milliseconds).
-         */
-        long getTime();
-
-        /**
-         * The number of hits that fall within that key "range" or "interval".
-         */
-        long getCount();
-
-        /**
-         * The total count of values aggregated to compute the total.
-         */
-        long getTotalCount();
-
-        /**
-         * The sum / total of the value field that fall within this key "interval".
-         */
-        double getTotal();
-
-        /**
-         * The mean of this facet interval.
-         */
-        double getMean();
-
-        /**
-         * The minimum value.
-         */
-        double getMin();
-
-        /**
-         * The maximum value.
-         */
-        double getMax();
-    }
-}

+ 0 - 264
src/main/java/org/elasticsearch/search/facet/datehistogram/DateHistogramFacetBuilder.java

@@ -1,264 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.datehistogram;
-
-import com.google.common.collect.Maps;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.query.FilterBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilderException;
-import org.elasticsearch.search.facet.FacetBuilder;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * A facet builder of date histogram facets.
- */
-public class DateHistogramFacetBuilder extends FacetBuilder {
-    private String keyFieldName;
-    private String valueFieldName;
-    private String interval = null;
-    private String preZone = null;
-    private String postZone = null;
-    private Boolean preZoneAdjustLargeInterval;
-    long preOffset = 0;
-    long postOffset = 0;
-    float factor = 1.0f;
-    private DateHistogramFacet.ComparatorType comparatorType;
-
-    private String valueScript;
-    private Map<String, Object> params;
-    private String lang;
-
-    /**
-     * Constructs a new date histogram facet with the provided facet logical name.
-     *
-     * @param name The logical name of the facet
-     */
-    public DateHistogramFacetBuilder(String name) {
-        super(name);
-    }
-
-    /**
-     * The field name to perform the histogram facet. Translates to perform the histogram facet
-     * using the provided field as both the {@link #keyField(String)} and {@link #valueField(String)}.
-     */
-    public DateHistogramFacetBuilder field(String field) {
-        this.keyFieldName = field;
-        return this;
-    }
-
-    /**
-     * The field name to use in order to control where the hit will "fall into" within the histogram
-     * entries. Essentially, using the key field numeric value, the hit will be "rounded" into the relevant
-     * bucket controlled by the interval.
-     */
-    public DateHistogramFacetBuilder keyField(String keyField) {
-        this.keyFieldName = keyField;
-        return this;
-    }
-
-    /**
-     * The field name to use as the value of the hit to compute data based on values within the interval
-     * (for example, total).
-     */
-    public DateHistogramFacetBuilder valueField(String valueField) {
-        this.valueFieldName = valueField;
-        return this;
-    }
-
-    public DateHistogramFacetBuilder valueScript(String valueScript) {
-        this.valueScript = valueScript;
-        return this;
-    }
-
-    public DateHistogramFacetBuilder param(String name, Object value) {
-        if (params == null) {
-            params = Maps.newHashMap();
-        }
-        params.put(name, value);
-        return this;
-    }
-
-    /**
-     * The language of the value script.
-     */
-    public DateHistogramFacetBuilder lang(String lang) {
-        this.lang = lang;
-        return this;
-    }
-
-    /**
-     * The interval used to control the bucket "size" where each key value of a hit will fall into. Check
-     * the docs for all available values.
-     */
-    public DateHistogramFacetBuilder interval(String interval) {
-        this.interval = interval;
-        return this;
-    }
-
-    /**
-     * Should pre zone be adjusted for large (day and above) intervals. Defaults to <tt>false</tt>.
-     */
-    public DateHistogramFacetBuilder preZoneAdjustLargeInterval(boolean preZoneAdjustLargeInterval) {
-        this.preZoneAdjustLargeInterval = preZoneAdjustLargeInterval;
-        return this;
-    }
-
-    /**
-     * Sets the pre time zone to use when bucketing the values. This timezone will be applied before
-     * rounding off the result.
-     * <p/>
-     * Can either be in the form of "-10:00" or
-     * one of the values listed here: http://joda-time.sourceforge.net/timezones.html.
-     */
-    public DateHistogramFacetBuilder preZone(String preZone) {
-        this.preZone = preZone;
-        return this;
-    }
-
-    /**
-     * Sets the post time zone to use when bucketing the values. This timezone will be applied after
-     * rounding off the result.
-     * <p/>
-     * Can either be in the form of "-10:00" or
-     * one of the values listed here: http://joda-time.sourceforge.net/timezones.html.
-     */
-    public DateHistogramFacetBuilder postZone(String postZone) {
-        this.postZone = postZone;
-        return this;
-    }
-
-    /**
-     * Sets a pre offset that will be applied before rounding the results.
-     */
-    public DateHistogramFacetBuilder preOffset(TimeValue preOffset) {
-        this.preOffset = preOffset.millis();
-        return this;
-    }
-
-    /**
-     * Sets a post offset that will be applied after rounding the results.
-     */
-    public DateHistogramFacetBuilder postOffset(TimeValue postOffset) {
-        this.postOffset = postOffset.millis();
-        return this;
-    }
-
-    /**
-     * Sets the factor that will be used to multiply the value with before and divided
-     * by after the rounding of the results.
-     */
-    public DateHistogramFacetBuilder factor(float factor) {
-        this.factor = factor;
-        return this;
-    }
-
-    public DateHistogramFacetBuilder comparator(DateHistogramFacet.ComparatorType comparatorType) {
-        this.comparatorType = comparatorType;
-        return this;
-    }
-
-    /**
-     * Should the facet run in global mode (not bounded by the search query) or not (bounded by
-     * the search query). Defaults to <tt>false</tt>.
-     */
-    @Override
-    public DateHistogramFacetBuilder global(boolean global) {
-        super.global(global);
-        return this;
-    }
-
-    /**
-     * An additional filter used to further filter down the set of documents the facet will run on.
-     */
-    @Override
-    public DateHistogramFacetBuilder facetFilter(FilterBuilder filter) {
-        this.facetFilter = filter;
-        return this;
-    }
-
-    /**
-     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
-     * nested objects matching the path to be computed into the facet.
-     */
-    public DateHistogramFacetBuilder nested(String nested) {
-        this.nested = nested;
-        return this;
-    }
-
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        if (keyFieldName == null) {
-            throw new SearchSourceBuilderException("field must be set on date histogram facet for facet [" + name + "]");
-        }
-        if (interval == null) {
-            throw new SearchSourceBuilderException("interval must be set on date histogram facet for facet [" + name + "]");
-        }
-        builder.startObject(name);
-
-        builder.startObject(DateHistogramFacet.TYPE);
-        if (valueFieldName != null) {
-            builder.field("key_field", keyFieldName);
-            builder.field("value_field", valueFieldName);
-        } else {
-            builder.field("field", keyFieldName);
-        }
-        if (valueScript != null) {
-            builder.field("value_script", valueScript);
-            if (lang != null) {
-                builder.field("lang", lang);
-            }
-            if (this.params != null) {
-                builder.field("params", this.params);
-            }
-        }
-        builder.field("interval", interval);
-        if (preZone != null) {
-            builder.field("pre_zone", preZone);
-        }
-        if (preZoneAdjustLargeInterval != null) {
-            builder.field("pre_zone_adjust_large_interval", preZoneAdjustLargeInterval);
-        }
-        if (postZone != null) {
-            builder.field("post_zone", postZone);
-        }
-        if (preOffset != 0) {
-            builder.field("pre_offset", preOffset);
-        }
-        if (postOffset != 0) {
-            builder.field("post_offset", postOffset);
-        }
-        if (factor != 1.0f) {
-            builder.field("factor", factor);
-        }
-        if (comparatorType != null) {
-            builder.field("comparator", comparatorType.description());
-        }
-        builder.endObject();
-
-        addFilterFacetAndGlobal(builder, params);
-
-        builder.endObject();
-        return builder;
-    }
-}

+ 0 - 222
src/main/java/org/elasticsearch/search/facet/datehistogram/DateHistogramFacetParser.java

@@ -1,222 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.datehistogram;
-
-import com.google.common.collect.ImmutableMap;
-import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.joda.DateMathParser;
-import org.elasticsearch.common.rounding.DateTimeUnit;
-import org.elasticsearch.common.rounding.Rounding;
-import org.elasticsearch.common.rounding.TimeZoneRounding;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.fielddata.IndexNumericFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.script.SearchScript;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.FacetParser;
-import org.elasticsearch.search.facet.FacetPhaseExecutionException;
-import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.Chronology;
-import org.joda.time.DateTimeZone;
-import org.joda.time.chrono.ISOChronology;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- *
- */
-public class DateHistogramFacetParser extends AbstractComponent implements FacetParser {
-
-    private final ImmutableMap<String, DateTimeUnit> dateTimeUnits;
-
-    @Inject
-    public DateHistogramFacetParser(Settings settings) {
-        super(settings);
-        InternalDateHistogramFacet.registerStreams();
-
-        dateTimeUnits = MapBuilder.<String, DateTimeUnit>newMapBuilder()
-                .put("year", DateTimeUnit.YEAR_OF_CENTURY)
-                .put("1y", DateTimeUnit.YEAR_OF_CENTURY)
-                .put("quarter", DateTimeUnit.QUARTER)
-                .put("1q", DateTimeUnit.QUARTER)
-                .put("month", DateTimeUnit.MONTH_OF_YEAR)
-                .put("1M", DateTimeUnit.MONTH_OF_YEAR)
-                .put("week", DateTimeUnit.WEEK_OF_WEEKYEAR)
-                .put("1w", DateTimeUnit.WEEK_OF_WEEKYEAR)
-                .put("day", DateTimeUnit.DAY_OF_MONTH)
-                .put("1d", DateTimeUnit.DAY_OF_MONTH)
-                .put("hour", DateTimeUnit.HOUR_OF_DAY)
-                .put("1h", DateTimeUnit.HOUR_OF_DAY)
-                .put("minute", DateTimeUnit.MINUTES_OF_HOUR)
-                .put("1m", DateTimeUnit.MINUTES_OF_HOUR)
-                .put("second", DateTimeUnit.SECOND_OF_MINUTE)
-                .put("1s", DateTimeUnit.SECOND_OF_MINUTE)
-                .immutableMap();
-    }
-
-    @Override
-    public String[] types() {
-        return new String[]{DateHistogramFacet.TYPE, "dateHistogram"};
-    }
-
-    @Override
-    public FacetExecutor.Mode defaultMainMode() {
-        return FacetExecutor.Mode.COLLECTOR;
-    }
-
-    @Override
-    public FacetExecutor.Mode defaultGlobalMode() {
-        return FacetExecutor.Mode.COLLECTOR;
-    }
-
-    @Override
-    public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
-        String keyField = null;
-        String valueField = null;
-        String valueScript = null;
-        ScriptService.ScriptType valueScriptType = null;
-        String scriptLang = null;
-        Map<String, Object> params = null;
-        String interval = null;
-        DateTimeZone preZone = DateTimeZone.UTC;
-        DateTimeZone postZone = DateTimeZone.UTC;
-        boolean preZoneAdjustLargeInterval = false;
-        long preOffset = 0;
-        long postOffset = 0;
-        float factor = 1.0f;
-        Chronology chronology = ISOChronology.getInstanceUTC();
-        DateHistogramFacet.ComparatorType comparatorType = DateHistogramFacet.ComparatorType.TIME;
-        XContentParser.Token token;
-        String fieldName = null;
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token == XContentParser.Token.FIELD_NAME) {
-                fieldName = parser.currentName();
-            } else if (token == XContentParser.Token.START_OBJECT) {
-                if ("params".equals(fieldName)) {
-                    params = parser.map();
-                }
-            } else if (token.isValue()) {
-                if ("field".equals(fieldName)) {
-                    keyField = parser.text();
-                } else if ("key_field".equals(fieldName) || "keyField".equals(fieldName)) {
-                    keyField = parser.text();
-                } else if ("value_field".equals(fieldName) || "valueField".equals(fieldName)) {
-                    valueField = parser.text();
-                } else if ("interval".equals(fieldName)) {
-                    interval = parser.text();
-                } else if ("time_zone".equals(fieldName) || "timeZone".equals(fieldName)) {
-                    preZone = parseZone(parser, token);
-                } else if ("pre_zone".equals(fieldName) || "preZone".equals(fieldName)) {
-                    preZone = parseZone(parser, token);
-                } else if ("pre_zone_adjust_large_interval".equals(fieldName) || "preZoneAdjustLargeInterval".equals(fieldName)) {
-                    preZoneAdjustLargeInterval = parser.booleanValue();
-                } else if ("post_zone".equals(fieldName) || "postZone".equals(fieldName)) {
-                    postZone = parseZone(parser, token);
-                } else if ("pre_offset".equals(fieldName) || "preOffset".equals(fieldName)) {
-                    preOffset = parseOffset(parser.text());
-                } else if ("post_offset".equals(fieldName) || "postOffset".equals(fieldName)) {
-                    postOffset = parseOffset(parser.text());
-                } else if ("factor".equals(fieldName)) {
-                    factor = parser.floatValue();
-                } else if (ScriptService.VALUE_SCRIPT_INLINE.match(fieldName)) {
-                    valueScript = parser.text();
-                    valueScriptType = ScriptService.ScriptType.INLINE;
-                } else if (ScriptService.VALUE_SCRIPT_ID.match(fieldName)) {
-                    valueScript = parser.text();
-                    valueScriptType = ScriptService.ScriptType.INDEXED;
-                } else if (ScriptService.VALUE_SCRIPT_FILE.match(fieldName)) {
-                    valueScript = parser.text();
-                    valueScriptType = ScriptService.ScriptType.FILE;
-                } else if ("order".equals(fieldName) || "comparator".equals(fieldName)) {
-                    comparatorType = DateHistogramFacet.ComparatorType.fromString(parser.text());
-                } else if (ScriptService.SCRIPT_LANG.match(fieldName)) {
-                    scriptLang = parser.text();
-                }
-            }
-        }
-
-        if (interval == null) {
-            throw new FacetPhaseExecutionException(facetName, "[interval] is required to be set for histogram facet");
-        }
-
-        if (keyField == null) {
-            throw new FacetPhaseExecutionException(facetName, "key field is required to be set for histogram facet, either using [field] or using [key_field]");
-        }
-
-        FieldMapper keyMapper = context.smartNameFieldMapper(keyField);
-        if (keyMapper == null) {
-            throw new FacetPhaseExecutionException(facetName, "(key) field [" + keyField + "] not found");
-        }
-        IndexNumericFieldData keyIndexFieldData = context.fieldData().getForField(keyMapper);
-
-        TimeZoneRounding.Builder tzRoundingBuilder;
-        DateTimeUnit dateTimeUnit = dateTimeUnits.get(interval);
-        if (dateTimeUnit != null) {
-            tzRoundingBuilder = TimeZoneRounding.builder(dateTimeUnit);
-        } else {
-            // the interval is a time value?
-            tzRoundingBuilder = TimeZoneRounding.builder(TimeValue.parseTimeValue(interval, null));
-        }
-
-        Rounding tzRounding = tzRoundingBuilder
-                .preZone(preZone).postZone(postZone)
-                .preZoneAdjustLargeInterval(preZoneAdjustLargeInterval)
-                .preOffset(preOffset).postOffset(postOffset)
-                .factor(factor)
-                .build();
-
-        if (valueScript != null) {
-            SearchScript script = context.scriptService().search(context.lookup(), scriptLang, valueScript, valueScriptType, params);
-            return new ValueScriptDateHistogramFacetExecutor(keyIndexFieldData, script, tzRounding, comparatorType, context.cacheRecycler());
-        } else if (valueField != null) {
-            FieldMapper valueMapper = context.smartNameFieldMapper(valueField);
-            if (valueMapper == null) {
-                throw new FacetPhaseExecutionException(facetName, "(value) field [" + valueField + "] not found");
-            }
-            IndexNumericFieldData valueIndexFieldData = context.fieldData().getForField(valueMapper);
-            return new ValueDateHistogramFacetExecutor(keyIndexFieldData, valueIndexFieldData, tzRounding, comparatorType, context.cacheRecycler());
-        } else {
-            return new CountDateHistogramFacetExecutor(keyIndexFieldData, tzRounding, comparatorType, context.cacheRecycler());
-        }
-    }
-
-    private long parseOffset(String offset) throws IOException {
-        if (offset.charAt(0) == '-') {
-            return -TimeValue.parseTimeValue(offset.substring(1), null).millis();
-        }
-        int beginIndex = offset.charAt(0) == '+' ? 1 : 0;
-        return TimeValue.parseTimeValue(offset.substring(beginIndex), null).millis();
-    }
-
-    private DateTimeZone parseZone(XContentParser parser, XContentParser.Token token) throws IOException {
-        if (token == XContentParser.Token.VALUE_NUMBER) {
-            return DateTimeZone.forOffsetHours(parser.intValue());
-        } else {
-            return DateMathParser.parseZone(parser.text());
-        }
-    }
-
-}

+ 0 - 217
src/main/java/org/elasticsearch/search/facet/datehistogram/InternalCountDateHistogramFacet.java

@@ -1,217 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.datehistogram;
-
-import com.carrotsearch.hppc.LongLongOpenHashMap;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.bytes.HashedBytesArray;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.recycler.Recycler;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
-import org.elasticsearch.search.facet.Facet;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- *
- */
-public class InternalCountDateHistogramFacet extends InternalDateHistogramFacet {
-
-    private static final BytesReference STREAM_TYPE = new HashedBytesArray(Strings.toUTF8Bytes("cdHistogram"));
-
-    public static void registerStreams() {
-        Streams.registerStream(STREAM, STREAM_TYPE);
-    }
-
-    static Stream STREAM = new Stream() {
-        @Override
-        public Facet readFacet(StreamInput in) throws IOException {
-            return readHistogramFacet(in);
-        }
-    };
-
-    @Override
-    public BytesReference streamType() {
-        return STREAM_TYPE;
-    }
-
-
-    /**
-     * A histogram entry representing a single entry within the result of a histogram facet.
-     */
-    public static class CountEntry implements Entry {
-        private final long time;
-        private final long count;
-
-        public CountEntry(long time, long count) {
-            this.time = time;
-            this.count = count;
-        }
-
-        @Override
-        public long getTime() {
-            return time;
-        }
-
-        @Override
-        public long getCount() {
-            return count;
-        }
-
-        @Override
-        public long getTotalCount() {
-            return 0;
-        }
-
-        @Override
-        public double getTotal() {
-            return Double.NaN;
-        }
-
-        @Override
-        public double getMean() {
-            return Double.NaN;
-        }
-
-        @Override
-        public double getMin() {
-            return Double.NaN;
-        }
-
-        @Override
-        public double getMax() {
-            return Double.NaN;
-        }
-    }
-
-    ComparatorType comparatorType;
-    CountEntry[] entries = null;
-
-    InternalCountDateHistogramFacet() {
-    }
-
-    public InternalCountDateHistogramFacet(String name, ComparatorType comparatorType, CountEntry[] entries) {
-        super(name);
-        this.comparatorType = comparatorType;
-        this.entries = entries;
-    }
-
-    @Override
-    public List<CountEntry> getEntries() {
-        return Arrays.asList(entries);
-    }
-
-    @Override
-    public Iterator<Entry> iterator() {
-        return (Iterator) getEntries().iterator();
-    }
-
-    @Override
-    public Facet reduce(ReduceContext context) {
-        List<Facet> facets = context.facets();
-        if (facets.size() == 1) {
-            InternalCountDateHistogramFacet histoFacet = (InternalCountDateHistogramFacet) facets.get(0);
-            Arrays.sort(histoFacet.entries, histoFacet.comparatorType.comparator());
-            return facets.get(0);
-        }
-
-        Recycler.V<LongLongOpenHashMap> counts = context.cacheRecycler().longLongMap(-1);
-        for (Facet facet : facets) {
-            InternalCountDateHistogramFacet histoFacet = (InternalCountDateHistogramFacet) facet;
-            for (CountEntry entry : histoFacet.entries) {
-                counts.v().addTo(entry.getTime(), entry.getCount());
-            }
-        }
-
-        CountEntry[] countEntries = new CountEntry[counts.v().size()];
-        final boolean[] states = counts.v().allocated;
-        final long[] keys = counts.v().keys;
-        final long[] values = counts.v().values;
-        int entriesIndex = 0;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
-                countEntries[entriesIndex++] = new CountEntry(keys[i], values[i]);
-            }
-        }
-        counts.close();
-
-        Arrays.sort(countEntries, comparatorType.comparator());
-
-        return new InternalCountDateHistogramFacet(getName(), comparatorType, countEntries);
-    }
-
-    static final class Fields {
-        static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
-        static final XContentBuilderString ENTRIES = new XContentBuilderString("entries");
-        static final XContentBuilderString TIME = new XContentBuilderString("time");
-        static final XContentBuilderString COUNT = new XContentBuilderString("count");
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject(getName());
-        builder.field(Fields._TYPE, TYPE);
-        builder.startArray(Fields.ENTRIES);
-        for (Entry entry : entries) {
-            builder.startObject();
-            builder.field(Fields.TIME, entry.getTime());
-            builder.field(Fields.COUNT, entry.getCount());
-            builder.endObject();
-        }
-        builder.endArray();
-        builder.endObject();
-        return builder;
-    }
-
-    public static InternalCountDateHistogramFacet readHistogramFacet(StreamInput in) throws IOException {
-        InternalCountDateHistogramFacet facet = new InternalCountDateHistogramFacet();
-        facet.readFrom(in);
-        return facet;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        super.readFrom(in);
-        comparatorType = ComparatorType.fromId(in.readByte());
-
-        int size = in.readVInt();
-        entries = new CountEntry[size];
-        for (int i = 0; i < size; i++) {
-            entries[i] = new CountEntry(in.readLong(), in.readVLong());
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        super.writeTo(out);
-        out.writeByte(comparatorType.id());
-        out.writeVInt(entries.length);
-        for (CountEntry entry : entries) {
-            out.writeLong(entry.getTime());
-            out.writeVLong(entry.getCount());
-        }
-    }
-}

+ 0 - 45
src/main/java/org/elasticsearch/search/facet/datehistogram/InternalDateHistogramFacet.java

@@ -1,45 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.datehistogram;
-
-import org.elasticsearch.search.facet.InternalFacet;
-
-/**
- *
- */
-public abstract class InternalDateHistogramFacet extends InternalFacet implements DateHistogramFacet {
-
-    public static void registerStreams() {
-        InternalCountDateHistogramFacet.registerStreams();
-        InternalFullDateHistogramFacet.registerStreams();
-    }
-
-    protected InternalDateHistogramFacet() {
-    }
-
-    protected InternalDateHistogramFacet(String facetName) {
-        super(facetName);
-    }
-
-    @Override
-    public final String getType() {
-        return TYPE;
-    }
-}

+ 0 - 265
src/main/java/org/elasticsearch/search/facet/datehistogram/InternalFullDateHistogramFacet.java

@@ -1,265 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.datehistogram;
-
-import com.carrotsearch.hppc.LongObjectOpenHashMap;
-import org.apache.lucene.util.CollectionUtil;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.bytes.HashedBytesArray;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.recycler.Recycler;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
-import org.elasticsearch.search.facet.Facet;
-
-import java.io.IOException;
-import java.util.*;
-
-/**
- *
- */
-public class InternalFullDateHistogramFacet extends InternalDateHistogramFacet {
-
-    private static final BytesReference STREAM_TYPE = new HashedBytesArray(Strings.toUTF8Bytes("fdHistogram"));
-
-    public static void registerStreams() {
-        Streams.registerStream(STREAM, STREAM_TYPE);
-    }
-
-    static Stream STREAM = new Stream() {
-        @Override
-        public Facet readFacet(StreamInput in) throws IOException {
-            return readHistogramFacet(in);
-        }
-    };
-
-    @Override
-    public BytesReference streamType() {
-        return STREAM_TYPE;
-    }
-
-
-    /**
-     * A histogram entry representing a single entry within the result of a histogram facet.
-     */
-    public static class FullEntry implements Entry {
-        private final long time;
-        long count;
-        long totalCount;
-        double total;
-        double min = Double.POSITIVE_INFINITY;
-        double max = Double.NEGATIVE_INFINITY;
-
-        public FullEntry(long time, long count, double min, double max, long totalCount, double total) {
-            this.time = time;
-            this.count = count;
-            this.min = min;
-            this.max = max;
-            this.totalCount = totalCount;
-            this.total = total;
-        }
-
-        @Override
-        public long getTime() {
-            return time;
-        }
-
-        @Override
-        public long getCount() {
-            return count;
-        }
-
-        @Override
-        public double getTotal() {
-            return total;
-        }
-
-        @Override
-        public long getTotalCount() {
-            return totalCount;
-        }
-
-        @Override
-        public double getMean() {
-            if (totalCount == 0) {
-                return totalCount;
-            }
-            return total / totalCount;
-        }
-
-        @Override
-        public double getMin() {
-            return this.min;
-        }
-
-        @Override
-        public double getMax() {
-            return this.max;
-        }
-    }
-
-    private ComparatorType comparatorType;
-    List<FullEntry> entries;
-
-    InternalFullDateHistogramFacet() {
-    }
-
-    InternalFullDateHistogramFacet(String name) {
-        super(name);
-    }
-
-    public InternalFullDateHistogramFacet(String name, ComparatorType comparatorType, List<FullEntry> entries) {
-        super(name);
-        this.comparatorType = comparatorType;
-        this.entries = entries;
-    }
-
-    @Override
-    public List<FullEntry> getEntries() {
-        return entries;
-    }
-
-    @Override
-    public Iterator<Entry> iterator() {
-        return (Iterator) getEntries().iterator();
-    }
-
-    @Override
-    public Facet reduce(ReduceContext context) {
-        List<Facet> facets = context.facets();
-        if (facets.size() == 1) {
-            // we need to sort it
-            InternalFullDateHistogramFacet internalFacet = (InternalFullDateHistogramFacet) facets.get(0);
-            List<FullEntry> entries = internalFacet.getEntries();
-            CollectionUtil.timSort(entries, comparatorType.comparator());
-            return internalFacet;
-        }
-
-        Recycler.V<LongObjectOpenHashMap<FullEntry>> map = context.cacheRecycler().longObjectMap(-1);
-
-        for (Facet facet : facets) {
-            InternalFullDateHistogramFacet histoFacet = (InternalFullDateHistogramFacet) facet;
-            for (FullEntry fullEntry : histoFacet.entries) {
-                FullEntry current = map.v().get(fullEntry.time);
-                if (current != null) {
-                    current.count += fullEntry.count;
-                    current.total += fullEntry.total;
-                    current.totalCount += fullEntry.totalCount;
-                    if (fullEntry.min < current.min) {
-                        current.min = fullEntry.min;
-                    }
-                    if (fullEntry.max > current.max) {
-                        current.max = fullEntry.max;
-                    }
-                } else {
-                    map.v().put(fullEntry.time, fullEntry);
-                }
-            }
-        }
-
-        // sort
-        // TODO: hppc - not happy with toArray
-        Object[] values = map.v().values().toArray();
-        Arrays.sort(values, (Comparator) comparatorType.comparator());
-        List<FullEntry> ordered = new ArrayList<>(map.v().size());
-        for (int i = 0; i < map.v().size(); i++) {
-            FullEntry value = (FullEntry) values[i];
-            if (value == null) {
-                break;
-            }
-            ordered.add(value);
-        }
-
-        map.close();
-
-        // just initialize it as already ordered facet
-        InternalFullDateHistogramFacet ret = new InternalFullDateHistogramFacet(getName());
-        ret.comparatorType = comparatorType;
-        ret.entries = ordered;
-        return ret;
-    }
-
-    static final class Fields {
-        static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
-        static final XContentBuilderString ENTRIES = new XContentBuilderString("entries");
-        static final XContentBuilderString TIME = new XContentBuilderString("time");
-        static final XContentBuilderString COUNT = new XContentBuilderString("count");
-        static final XContentBuilderString TOTAL = new XContentBuilderString("total");
-        static final XContentBuilderString TOTAL_COUNT = new XContentBuilderString("total_count");
-        static final XContentBuilderString MEAN = new XContentBuilderString("mean");
-        static final XContentBuilderString MIN = new XContentBuilderString("min");
-        static final XContentBuilderString MAX = new XContentBuilderString("max");
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject(getName());
-        builder.field(Fields._TYPE, TYPE);
-        builder.startArray(Fields.ENTRIES);
-        for (Entry entry : getEntries()) {
-            builder.startObject();
-            builder.field(Fields.TIME, entry.getTime());
-            builder.field(Fields.COUNT, entry.getCount());
-            builder.field(Fields.MIN, entry.getMin());
-            builder.field(Fields.MAX, entry.getMax());
-            builder.field(Fields.TOTAL, entry.getTotal());
-            builder.field(Fields.TOTAL_COUNT, entry.getTotalCount());
-            builder.field(Fields.MEAN, entry.getMean());
-            builder.endObject();
-        }
-        builder.endArray();
-        builder.endObject();
-        return builder;
-    }
-
-    public static InternalFullDateHistogramFacet readHistogramFacet(StreamInput in) throws IOException {
-        InternalFullDateHistogramFacet facet = new InternalFullDateHistogramFacet();
-        facet.readFrom(in);
-        return facet;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        super.readFrom(in);
-        comparatorType = ComparatorType.fromId(in.readByte());
-        int size = in.readVInt();
-        entries = new ArrayList<>(size);
-        for (int i = 0; i < size; i++) {
-            entries.add(new FullEntry(in.readLong(), in.readVLong(), in.readDouble(), in.readDouble(), in.readVLong(), in.readDouble()));
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        super.writeTo(out);
-        out.writeByte(comparatorType.id());
-        out.writeVInt(entries.size());
-        for (FullEntry entry : entries) {
-            out.writeLong(entry.time);
-            out.writeVLong(entry.count);
-            out.writeDouble(entry.min);
-            out.writeDouble(entry.max);
-            out.writeVLong(entry.totalCount);
-            out.writeDouble(entry.total);
-        }
-    }
-}

+ 0 - 150
src/main/java/org/elasticsearch/search/facet/datehistogram/ValueDateHistogramFacetExecutor.java

@@ -1,150 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.datehistogram;
-
-import com.carrotsearch.hppc.LongObjectOpenHashMap;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.index.SortedNumericDocValues;
-import org.elasticsearch.cache.recycler.CacheRecycler;
-import org.elasticsearch.common.recycler.Recycler;
-import org.elasticsearch.common.rounding.Rounding;
-import org.elasticsearch.index.fielddata.IndexNumericFieldData;
-import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
-import org.elasticsearch.search.facet.DoubleFacetAggregatorBase;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.InternalFacet;
-import org.elasticsearch.search.facet.LongFacetAggregatorBase;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-/**
- * A histogram facet collector that uses different fields for the key and the value.
- */
-public class ValueDateHistogramFacetExecutor extends FacetExecutor {
-
-    private final IndexNumericFieldData keyIndexFieldData;
-    private final IndexNumericFieldData valueIndexFieldData;
-    private final DateHistogramFacet.ComparatorType comparatorType;
-    final Rounding tzRounding;
-
-    final Recycler.V<LongObjectOpenHashMap<InternalFullDateHistogramFacet.FullEntry>> entries;
-
-    public ValueDateHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, Rounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, CacheRecycler cacheRecycler) {
-        this.comparatorType = comparatorType;
-        this.keyIndexFieldData = keyIndexFieldData;
-        this.valueIndexFieldData = valueIndexFieldData;
-        this.tzRounding = tzRounding;
-
-        this.entries = cacheRecycler.longObjectMap(-1);
-    }
-
-    @Override
-    public Collector collector() {
-        return new Collector();
-    }
-
-    @Override
-    public InternalFacet buildFacet(String facetName) {
-        ArrayList<InternalFullDateHistogramFacet.FullEntry> entries1 = new ArrayList<>(entries.v().size());
-        final boolean[] states = entries.v().allocated;
-        final Object[] values = entries.v().values;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
-                InternalFullDateHistogramFacet.FullEntry value  = (InternalFullDateHistogramFacet.FullEntry) values[i];
-                entries1.add(value);
-            }
-        }
-
-        entries.close();
-        return new InternalFullDateHistogramFacet(facetName, comparatorType, entries1);
-    }
-
-    class Collector extends FacetExecutor.Collector {
-
-        private final DateHistogramProc histoProc;
-        private SortedNumericDocValues keyValues;
-
-        public Collector() {
-            this.histoProc = new DateHistogramProc(tzRounding, entries.v());
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            keyValues = keyIndexFieldData.load(context).getLongValues();
-            histoProc.valueValues = valueIndexFieldData.load(context).getDoubleValues();
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-            histoProc.onDoc(doc, keyValues);
-        }
-
-        @Override
-        public void postCollection() {
-        }
-    }
-
-    public static class DateHistogramProc extends LongFacetAggregatorBase {
-
-        final LongObjectOpenHashMap<InternalFullDateHistogramFacet.FullEntry> entries;
-        private final Rounding tzRounding;
-
-        SortedNumericDoubleValues valueValues;
-
-        final ValueAggregator valueAggregator = new ValueAggregator();
-
-        public DateHistogramProc(Rounding tzRounding, LongObjectOpenHashMap<InternalFullDateHistogramFacet.FullEntry> entries) {
-            this.tzRounding = tzRounding;
-            this.entries = entries;
-        }
-
-        @Override
-        public void onValue(int docId, long value) {
-            long time = tzRounding.round(value);
-
-            InternalFullDateHistogramFacet.FullEntry entry = entries.get(time);
-            if (entry == null) {
-                entry = new InternalFullDateHistogramFacet.FullEntry(time, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 0, 0);
-                entries.put(time, entry);
-            }
-            entry.count++;
-            valueAggregator.entry = entry;
-            valueAggregator.onDoc(docId, valueValues);
-        }
-
-        public final static class ValueAggregator extends DoubleFacetAggregatorBase {
-
-            InternalFullDateHistogramFacet.FullEntry entry;
-
-            @Override
-            public void onValue(int docId, double value) {
-                entry.totalCount++;
-                entry.total += value;
-                if (value < entry.min) {
-                    entry.min = value;
-                }
-                if (value > entry.max) {
-                    entry.max = value;
-                }
-            }
-        }
-    }
-}

+ 0 - 147
src/main/java/org/elasticsearch/search/facet/datehistogram/ValueScriptDateHistogramFacetExecutor.java

@@ -1,147 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.datehistogram;
-
-import com.carrotsearch.hppc.LongObjectOpenHashMap;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.search.Scorer;
-import org.elasticsearch.cache.recycler.CacheRecycler;
-import org.elasticsearch.common.recycler.Recycler;
-import org.elasticsearch.common.rounding.Rounding;
-import org.elasticsearch.index.fielddata.IndexNumericFieldData;
-import org.elasticsearch.script.SearchScript;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.InternalFacet;
-import org.elasticsearch.search.facet.LongFacetAggregatorBase;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-/**
- * A histogram facet collector that uses the same field as the key as well as the
- * value.
- */
-public class ValueScriptDateHistogramFacetExecutor extends FacetExecutor {
-
-    private final IndexNumericFieldData keyIndexFieldData;
-    private final DateHistogramFacet.ComparatorType comparatorType;
-    final SearchScript valueScript;
-    final Rounding tzRounding;
-
-    final Recycler.V<LongObjectOpenHashMap<InternalFullDateHistogramFacet.FullEntry>> entries;
-
-    public ValueScriptDateHistogramFacetExecutor(IndexNumericFieldData keyIndexFieldData, SearchScript valueScript, Rounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, CacheRecycler cacheRecycler) {
-        this.comparatorType = comparatorType;
-        this.keyIndexFieldData = keyIndexFieldData;
-        this.valueScript = valueScript;
-        this.tzRounding = tzRounding;
-
-        this.entries = cacheRecycler.longObjectMap(-1);
-    }
-
-    @Override
-    public Collector collector() {
-        return new Collector();
-    }
-
-    @Override
-    public InternalFacet buildFacet(String facetName) {
-        ArrayList<InternalFullDateHistogramFacet.FullEntry> entries1 = new ArrayList<>(entries.v().size());
-        final boolean[] states = entries.v().allocated;
-        final Object[] values = entries.v().values;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
-                InternalFullDateHistogramFacet.FullEntry value = (InternalFullDateHistogramFacet.FullEntry) values[i];
-                entries1.add(value);
-            }
-        }
-
-        entries.close();
-        return new InternalFullDateHistogramFacet(facetName, comparatorType, entries1);
-    }
-
-    class Collector extends FacetExecutor.Collector {
-
-        private final DateHistogramProc histoProc;
-        private SortedNumericDocValues keyValues;
-
-        public Collector() {
-            histoProc = new DateHistogramProc(tzRounding, valueScript, entries.v());
-        }
-
-        @Override
-        public void setScorer(Scorer scorer) throws IOException {
-            valueScript.setScorer(scorer);
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            keyValues = keyIndexFieldData.load(context).getLongValues();
-            valueScript.setNextReader(context);
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-            histoProc.onDoc(doc, keyValues);
-        }
-
-        @Override
-        public void postCollection() {
-        }
-    }
-
-    public static class DateHistogramProc extends LongFacetAggregatorBase {
-
-        private final Rounding tzRounding;
-        protected final SearchScript valueScript;
-
-        final LongObjectOpenHashMap<InternalFullDateHistogramFacet.FullEntry> entries;
-
-        public DateHistogramProc(Rounding tzRounding, SearchScript valueScript, final LongObjectOpenHashMap<InternalFullDateHistogramFacet.FullEntry> entries) {
-            this.tzRounding = tzRounding;
-            this.valueScript = valueScript;
-            this.entries = entries;
-        }
-
-        @Override
-        public void onValue(int docId, long value) {
-            valueScript.setNextDocId(docId);
-            long time = tzRounding.round(value);
-            double scriptValue = valueScript.runAsDouble();
-
-            InternalFullDateHistogramFacet.FullEntry entry = entries.get(time);
-            if (entry == null) {
-                entry = new InternalFullDateHistogramFacet.FullEntry(time, 1, scriptValue, scriptValue, 1, scriptValue);
-                entries.put(time, entry);
-            } else {
-                entry.count++;
-                entry.totalCount++;
-                entry.total += scriptValue;
-                if (scriptValue < entry.min) {
-                    entry.min = scriptValue;
-                }
-                if (scriptValue > entry.max) {
-                    entry.max = scriptValue;
-                }
-            }
-        }
-    }
-}

+ 0 - 38
src/main/java/org/elasticsearch/search/facet/filter/FilterFacet.java

@@ -1,38 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.filter;
-
-import org.elasticsearch.search.facet.Facet;
-
-/**
- * A query facets returns the count (number of hits) for a facet based on a query.
- */
-public interface FilterFacet extends Facet {
-
-    /**
-     * The type of the filter facet.
-     */
-    public static final String TYPE = "filter";
-
-    /**
-     * The count of the facet.
-     */
-    long getCount();
-}

+ 0 - 82
src/main/java/org/elasticsearch/search/facet/filter/FilterFacetBuilder.java

@@ -1,82 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.filter;
-
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.query.FilterBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilderException;
-import org.elasticsearch.search.facet.FacetBuilder;
-
-import java.io.IOException;
-
-/**
- *
- */
-public class FilterFacetBuilder extends FacetBuilder {
-
-    private FilterBuilder filter;
-
-    public FilterFacetBuilder(String name) {
-        super(name);
-    }
-
-    /**
-     * Marks the facet to run in a global scope, not bounded by any query.
-     */
-    @Override
-    public FilterFacetBuilder global(boolean global) {
-        super.global(global);
-        return this;
-    }
-
-    public FilterFacetBuilder facetFilter(FilterBuilder filter) {
-        this.facetFilter = filter;
-        return this;
-    }
-
-    /**
-     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
-     * nested objects matching the path to be computed into the facet.
-     */
-    public FilterFacetBuilder nested(String nested) {
-        this.nested = nested;
-        return this;
-    }
-
-    public FilterFacetBuilder filter(FilterBuilder filter) {
-        this.filter = filter;
-        return this;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        if (filter == null) {
-            throw new SearchSourceBuilderException("filter must be set on filter facet for facet [" + name + "]");
-        }
-        builder.startObject(name);
-        builder.field(FilterFacet.TYPE);
-        filter.toXContent(builder, params);
-
-        addFilterFacetAndGlobal(builder, params);
-
-        builder.endObject();
-        return builder;
-    }
-}

+ 0 - 106
src/main/java/org/elasticsearch/search/facet/filter/FilterFacetExecutor.java

@@ -1,106 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.filter;
-
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.util.Bits;
-import org.elasticsearch.common.lucene.docset.AndDocIdSet;
-import org.elasticsearch.common.lucene.docset.ContextDocIdSet;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.InternalFacet;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- *
- */
-public class FilterFacetExecutor extends FacetExecutor {
-
-    private final Filter filter;
-
-    long count = -1;
-
-    public FilterFacetExecutor(Filter filter) {
-        this.filter = filter;
-    }
-
-    @Override
-    public Collector collector() {
-        return new Collector();
-    }
-
-    @Override
-    public Post post() {
-        return new Post();
-    }
-
-    @Override
-    public InternalFacet buildFacet(String facetName) {
-        return new InternalFilterFacet(facetName, count);
-    }
-
-    class Post extends FacetExecutor.Post {
-
-        @Override
-        public void executePost(List<ContextDocIdSet> docSets) throws IOException {
-            int count = 0;
-            for (ContextDocIdSet docSet : docSets) {
-                DocIdSet filteredDocIdSet = filter.getDocIdSet(docSet.context, docSet.context.reader().getLiveDocs());
-                if (filteredDocIdSet == null || docSet.docSet == null) {
-                    continue;
-                }
-                DocIdSetIterator iter = new AndDocIdSet(new DocIdSet[]{docSet.docSet, filteredDocIdSet}).iterator();
-                while (iter.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
-                    count++;
-                }
-            }
-            FilterFacetExecutor.this.count = count;
-        }
-    }
-
-    class Collector extends FacetExecutor.Collector {
-
-        private long count = 0;
-        private Bits bits;
-
-        @Override
-        public void collect(int doc) throws IOException {
-            if (bits.get(doc)) {
-                count++;
-            }
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            bits = DocIdSets.toSafeBits(context.reader(), filter.getDocIdSet(context, context.reader().getLiveDocs()));
-        }
-
-        @Override
-        public void postCollection() {
-            bits = null;
-            FilterFacetExecutor.this.count = count;
-        }
-    }
-}

+ 0 - 64
src/main/java/org/elasticsearch/search/facet/filter/FilterFacetParser.java

@@ -1,64 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet.filter;
-
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.ParsedFilter;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.FacetParser;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.io.IOException;
-
-/**
- *
- */
-public class FilterFacetParser extends AbstractComponent implements FacetParser {
-
-    @Inject
-    public FilterFacetParser(Settings settings) {
-        super(settings);
-        InternalFilterFacet.registerStreams();
-    }
-
-    @Override
-    public String[] types() {
-        return new String[]{FilterFacet.TYPE};
-    }
-
-    @Override
-    public FacetExecutor.Mode defaultMainMode() {
-        return FacetExecutor.Mode.COLLECTOR;
-    }
-
-    @Override
-    public FacetExecutor.Mode defaultGlobalMode() {
-        return FacetExecutor.Mode.POST;
-    }
-
-    @Override
-    public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
-        ParsedFilter parsedFilter = context.queryParserService().parseInnerFilter(parser);
-        return new FilterFacetExecutor(parsedFilter == null ? Queries.MATCH_ALL_FILTER : parsedFilter.filter());
-    }
-}

+ 0 - 124
src/main/java/org/elasticsearch/search/facet/filter/InternalFilterFacet.java

@@ -1,124 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.filter;
-
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.bytes.HashedBytesArray;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
-import org.elasticsearch.search.facet.Facet;
-import org.elasticsearch.search.facet.InternalFacet;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- *
- */
-public class InternalFilterFacet extends InternalFacet implements FilterFacet {
-
-    private static final BytesReference STREAM_TYPE = new HashedBytesArray(Strings.toUTF8Bytes("filter"));
-
-    public static void registerStreams() {
-        Streams.registerStream(STREAM, STREAM_TYPE);
-    }
-
-    static Stream STREAM = new Stream() {
-        @Override
-        public Facet readFacet(StreamInput in) throws IOException {
-            return readFilterFacet(in);
-        }
-    };
-
-    @Override
-    public BytesReference streamType() {
-        return STREAM_TYPE;
-    }
-
-    private long count;
-
-    InternalFilterFacet() {
-    }
-
-    public InternalFilterFacet(String name, long count) {
-        super(name);
-        this.count = count;
-    }
-
-    @Override
-    public String getType() {
-        return TYPE;
-    }
-
-    /**
-     * The count of the facet.
-     */
-    public long getCount() {
-        return count;
-    }
-
-    @Override
-    public Facet reduce(ReduceContext context) {
-        List<Facet> facets = context.facets();
-        if (facets.size() == 1) {
-            return facets.get(0);
-        }
-        long count = 0;
-        for (Facet facet : facets) {
-            count += ((FilterFacet) facet).getCount();
-        }
-        return new InternalFilterFacet(getName(), count);
-    }
-
-    static final class Fields {
-        static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
-        static final XContentBuilderString COUNT = new XContentBuilderString("count");
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject(getName());
-        builder.field(Fields._TYPE, FilterFacet.TYPE);
-        builder.field(Fields.COUNT, count);
-        builder.endObject();
-        return builder;
-    }
-
-    public static FilterFacet readFilterFacet(StreamInput in) throws IOException {
-        InternalFilterFacet result = new InternalFilterFacet();
-        result.readFrom(in);
-        return result;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        super.readFrom(in);
-        count = in.readVLong();
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        super.writeTo(out);
-        out.writeVLong(count);
-    }
-}

+ 0 - 107
src/main/java/org/elasticsearch/search/facet/geodistance/GeoDistanceFacet.java

@@ -1,107 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.geodistance;
-
-import org.elasticsearch.search.facet.Facet;
-
-import java.util.List;
-
-/**
- *
- */
-public interface GeoDistanceFacet extends Facet, Iterable<GeoDistanceFacet.Entry> {
-
-    /**
-     * The type of the filter facet.
-     */
-    public static final String TYPE = "geo_distance";
-
-    /**
-     * An ordered list of geo distance facet entries.
-     */
-    List<Entry> getEntries();
-
-    public class Entry {
-
-        double from = Double.NEGATIVE_INFINITY;
-        double to = Double.POSITIVE_INFINITY;
-        long count;
-        long totalCount;
-        double total;
-        double min = Double.POSITIVE_INFINITY;
-        double max = Double.NEGATIVE_INFINITY;
-
-        /**
-         * internal field used to see if this entry was already found for a doc
-         */
-        boolean foundInDoc = false;
-
-        Entry() {
-        }
-
-        public Entry(double from, double to, long count, long totalCount, double total, double min, double max) {
-            this.from = from;
-            this.to = to;
-            this.count = count;
-            this.totalCount = totalCount;
-            this.total = total;
-            this.min = min;
-            this.max = max;
-        }
-
-        public double getFrom() {
-            return this.from;
-        }
-
-        public double getTo() {
-            return this.to;
-        }
-
-        public long getCount() {
-            return this.count;
-        }
-
-        public long getTotalCount() {
-            return this.totalCount;
-        }
-
-        public double getTotal() {
-            return this.total;
-        }
-
-        /**
-         * The mean of this facet interval.
-         */
-        public double getMean() {
-            if (totalCount == 0) {
-                return 0;
-            }
-            return total / totalCount;
-        }
-
-        public double getMin() {
-            return this.min;
-        }
-
-        public double getMax() {
-            return this.max;
-        }
-    }
-}

+ 0 - 290
src/main/java/org/elasticsearch/search/facet/geodistance/GeoDistanceFacetBuilder.java

@@ -1,290 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.geodistance;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import org.elasticsearch.common.geo.GeoDistance;
-import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.query.FilterBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilderException;
-import org.elasticsearch.search.facet.FacetBuilder;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-
-/**
- * A geo distance builder allowing to create a facet of distances from a specific location including the
- * number of hits within each distance range, and aggregated data (like totals of either the distance or
- * cusotm value fields).
- */
-public class GeoDistanceFacetBuilder extends FacetBuilder {
-
-    private String fieldName;
-
-    private String valueFieldName;
-
-    private double lat;
-
-    private double lon;
-
-    private String geohash;
-
-    private GeoDistance geoDistance;
-
-    private DistanceUnit unit;
-
-    private Map<String, Object> params;
-
-    private String valueScript;
-
-    private String lang;
-
-    private List<Entry> entries = Lists.newArrayList();
-
-    /**
-     * Constructs a new geo distance with the provided facet name.
-     */
-    public GeoDistanceFacetBuilder(String name) {
-        super(name);
-    }
-
-    /**
-     * The geo point field that will be used to extract the document location(s).
-     */
-    public GeoDistanceFacetBuilder field(String fieldName) {
-        this.fieldName = fieldName;
-        return this;
-    }
-
-    /**
-     * A custom value field (numeric) that will be used to provide aggregated data for each facet (for example, total).
-     */
-    public GeoDistanceFacetBuilder valueField(String valueFieldName) {
-        this.valueFieldName = valueFieldName;
-        return this;
-    }
-
-    /**
-     * A custom value script (result is numeric) that will be used to provide aggregated data for each facet (for example, total).
-     */
-    public GeoDistanceFacetBuilder valueScript(String valueScript) {
-        this.valueScript = valueScript;
-        return this;
-    }
-
-    /**
-     * The language of the {@link #valueScript(String)} script.
-     */
-    public GeoDistanceFacetBuilder lang(String lang) {
-        this.lang = lang;
-        return this;
-    }
-
-    /**
-     * Parameters for {@link #valueScript(String)} to improve performance when executing the same script with different parameters.
-     */
-    public GeoDistanceFacetBuilder scriptParam(String name, Object value) {
-        if (params == null) {
-            params = Maps.newHashMap();
-        }
-        params.put(name, value);
-        return this;
-    }
-
-    /**
-     * The point to create the range distance facets from.
-     *
-     * @param lat latitude.
-     * @param lon longitude.
-     */
-    public GeoDistanceFacetBuilder point(double lat, double lon) {
-        this.lat = lat;
-        this.lon = lon;
-        return this;
-    }
-
-    /**
-     * The latitude to create the range distance facets from.
-     */
-    public GeoDistanceFacetBuilder lat(double lat) {
-        this.lat = lat;
-        return this;
-    }
-
-    /**
-     * The longitude to create the range distance facets from.
-     */
-    public GeoDistanceFacetBuilder lon(double lon) {
-        this.lon = lon;
-        return this;
-    }
-
-    /**
-     * The geohash of the geo point to create the range distance facets from.
-     */
-    public GeoDistanceFacetBuilder geohash(String geohash) {
-        this.geohash = geohash;
-        return this;
-    }
-
-    /**
-     * The geo distance type used to compute the distance.
-     */
-    public GeoDistanceFacetBuilder geoDistance(GeoDistance geoDistance) {
-        this.geoDistance = geoDistance;
-        return this;
-    }
-
-    /**
-     * Adds a range entry with explicit from and to.
-     *
-     * @param from The from distance limit
-     * @param to   The to distance limit
-     */
-    public GeoDistanceFacetBuilder addRange(double from, double to) {
-        entries.add(new Entry(from, to));
-        return this;
-    }
-
-    /**
-     * Adds a range entry with explicit from and unbounded to.
-     *
-     * @param from the from distance limit, to is unbounded.
-     */
-    public GeoDistanceFacetBuilder addUnboundedTo(double from) {
-        entries.add(new Entry(from, Double.POSITIVE_INFINITY));
-        return this;
-    }
-
-    /**
-     * Adds a range entry with explicit to and unbounded from.
-     *
-     * @param to the to distance limit, from is unbounded.
-     */
-    public GeoDistanceFacetBuilder addUnboundedFrom(double to) {
-        entries.add(new Entry(Double.NEGATIVE_INFINITY, to));
-        return this;
-    }
-
-    /**
-     * The distance unit to use. Defaults to {@link org.elasticsearch.common.unit.DistanceUnit#KILOMETERS}
-     */
-    public GeoDistanceFacetBuilder unit(DistanceUnit unit) {
-        this.unit = unit;
-        return this;
-    }
-
-    /**
-     * Marks the facet to run in a global scope, not bounded by any query.
-     */
-    public GeoDistanceFacetBuilder global(boolean global) {
-        super.global(global);
-        return this;
-    }
-
-    public GeoDistanceFacetBuilder facetFilter(FilterBuilder filter) {
-        this.facetFilter = filter;
-        return this;
-    }
-
-    /**
-     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
-     * nested objects matching the path to be computed into the facet.
-     */
-    public GeoDistanceFacetBuilder nested(String nested) {
-        this.nested = nested;
-        return this;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        if (fieldName == null) {
-            throw new SearchSourceBuilderException("field must be set on geo_distance facet for facet [" + name + "]");
-        }
-        if (entries.isEmpty()) {
-            throw new SearchSourceBuilderException("at least one range must be defined for geo_distance facet [" + name + "]");
-        }
-
-        builder.startObject(name);
-
-        builder.startObject(GeoDistanceFacet.TYPE);
-
-        if (geohash != null) {
-            builder.field(fieldName, geohash);
-        } else {
-            builder.startArray(fieldName).value(lon).value(lat).endArray();
-        }
-
-        if (valueFieldName != null) {
-            builder.field("value_field", valueFieldName);
-        }
-
-        if (valueScript != null) {
-            builder.field("value_script", valueScript);
-            if (lang != null) {
-                builder.field("lang", lang);
-            }
-            if (this.params != null) {
-                builder.field("params", this.params);
-            }
-        }
-
-        builder.startArray("ranges");
-        for (Entry entry : entries) {
-            builder.startObject();
-            if (!Double.isInfinite(entry.from)) {
-                builder.field("from", entry.from);
-            }
-            if (!Double.isInfinite(entry.to)) {
-                builder.field("to", entry.to);
-            }
-            builder.endObject();
-        }
-        builder.endArray();
-
-        if (unit != null) {
-            builder.field("unit", unit);
-        }
-        if (geoDistance != null) {
-            builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
-        }
-
-        builder.endObject();
-
-        addFilterFacetAndGlobal(builder, params);
-
-        builder.endObject();
-        return builder;
-    }
-
-    private static class Entry {
-        final double from;
-        final double to;
-
-        private Entry(double from, double to) {
-            this.from = from;
-            this.to = to;
-        }
-    }
-}

+ 0 - 138
src/main/java/org/elasticsearch/search/facet/geodistance/GeoDistanceFacetExecutor.java

@@ -1,138 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.geodistance;
-
-import org.apache.lucene.index.AtomicReaderContext;
-import org.elasticsearch.common.geo.GeoDistance;
-import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.index.fielddata.MultiGeoPointValues;
-import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.InternalFacet;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.io.IOException;
-
-/**
- *
- */
-public class GeoDistanceFacetExecutor extends FacetExecutor {
-
-    final IndexGeoPointFieldData indexFieldData;
-    final double lat;
-    final double lon;
-    final DistanceUnit unit;
-    final GeoDistance geoDistance;
-    final GeoDistance.FixedSourceDistance fixedSourceDistance;
-
-    final GeoDistanceFacet.Entry[] entries;
-
-    public GeoDistanceFacetExecutor(IndexGeoPointFieldData indexFieldData, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
-                                    GeoDistanceFacet.Entry[] entries, SearchContext context) {
-        this.lat = lat;
-        this.lon = lon;
-        this.unit = unit;
-        this.entries = entries;
-        this.geoDistance = geoDistance;
-        this.indexFieldData = indexFieldData;
-        this.fixedSourceDistance = geoDistance.fixedSourceDistance(lat, lon, unit);
-    }
-
-    @Override
-    public Collector collector() {
-        return new Collector(new Aggregator(fixedSourceDistance, entries));
-    }
-
-    @Override
-    public InternalFacet buildFacet(String facetName) {
-        return new InternalGeoDistanceFacet(facetName, entries);
-    }
-
-    class Collector extends FacetExecutor.Collector {
-
-        protected MultiGeoPointValues values;
-        protected final Aggregator aggregator;
-
-        Collector(Aggregator aggregator) {
-            this.aggregator = aggregator;
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            values = indexFieldData.load(context).getGeoPointValues();
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-            for (GeoDistanceFacet.Entry entry : entries) {
-                entry.foundInDoc = false;
-            }
-            this.aggregator.onDoc(doc, values);
-        }
-
-        @Override
-        public void postCollection() {
-        }
-    }
-
-    public static class Aggregator {
-
-        protected final GeoDistance.FixedSourceDistance fixedSourceDistance;
-
-        protected final GeoDistanceFacet.Entry[] entries;
-
-        public Aggregator(GeoDistance.FixedSourceDistance fixedSourceDistance, GeoDistanceFacet.Entry[] entries) {
-            this.fixedSourceDistance = fixedSourceDistance;
-            this.entries = entries;
-        }
-        
-        public void onDoc(int docId, MultiGeoPointValues values) {
-            values.setDocument(docId);
-            final int length = values.count();
-            for (int i = 0; i < length; i++) {
-                final GeoPoint next = values.valueAt(i);
-                double distance = fixedSourceDistance.calculate(next.getLat(), next.getLon());
-                for (GeoDistanceFacet.Entry entry : entries) {
-                    if (entry.foundInDoc) {
-                        continue;
-                    }
-                    if (distance >= entry.getFrom() && distance < entry.getTo()) {
-                       entry.foundInDoc = true;
-                       collectGeoPoint(entry, docId, distance);
-                    }
-                }
-            }
-        }
-        
-        protected void collectGeoPoint(GeoDistanceFacet.Entry entry, int docId, double distance) {
-            entry.count++;
-            entry.totalCount++;
-            entry.total += distance;
-            if (distance < entry.min) {
-                entry.min = distance;
-            }
-            if (distance > entry.max) {
-                entry.max = distance;
-            }
-        }
-        
-    }
-}

+ 0 - 187
src/main/java/org/elasticsearch/search/facet/geodistance/GeoDistanceFacetParser.java

@@ -1,187 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet.geodistance;
-
-import com.google.common.collect.Lists;
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.geo.GeoDistance;
-import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.common.geo.GeoUtils;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.index.fielddata.IndexNumericFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.FacetParser;
-import org.elasticsearch.search.facet.FacetPhaseExecutionException;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-/**
- *
- */
-public class GeoDistanceFacetParser extends AbstractComponent implements FacetParser {
-
-    @Inject
-    public GeoDistanceFacetParser(Settings settings) {
-        super(settings);
-        InternalGeoDistanceFacet.registerStreams();
-    }
-
-    @Override
-    public String[] types() {
-        return new String[]{GeoDistanceFacet.TYPE, "geoDistance"};
-    }
-
-    @Override
-    public FacetExecutor.Mode defaultMainMode() {
-        return FacetExecutor.Mode.COLLECTOR;
-    }
-
-    @Override
-    public FacetExecutor.Mode defaultGlobalMode() {
-        return FacetExecutor.Mode.COLLECTOR;
-    }
-
-    @Override
-    public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
-        String fieldName = null;
-        String valueFieldName = null;
-        String valueScript = null;
-        ScriptService.ScriptType scriptType = null;
-        String scriptLang = null;
-        Map<String, Object> params = null;
-        GeoPoint point = new GeoPoint();
-        DistanceUnit unit = DistanceUnit.DEFAULT;
-        GeoDistance geoDistance = GeoDistance.DEFAULT;
-        List<GeoDistanceFacet.Entry> entries = Lists.newArrayList();
-
-        boolean normalizeLon = true;
-        boolean normalizeLat = true;
-
-        XContentParser.Token token;
-        String currentName = parser.currentName();
-
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token == XContentParser.Token.FIELD_NAME) {
-                currentName = parser.currentName();
-            } else if (token == XContentParser.Token.START_ARRAY) {
-                if ("ranges".equals(currentName) || "entries".equals(currentName)) {
-                    // "ranges" : [
-                    //     { "from" : 0, "to" : 12.5 }
-                    //     { "from" : 12.5 }
-                    // ]
-                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                        double from = Double.NEGATIVE_INFINITY;
-                        double to = Double.POSITIVE_INFINITY;
-                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                            if (token == XContentParser.Token.FIELD_NAME) {
-                                currentName = parser.currentName();
-                            } else if (token.isValue()) {
-                                if ("from".equals(currentName)) {
-                                    from = parser.doubleValue();
-                                } else if ("to".equals(currentName)) {
-                                    to = parser.doubleValue();
-                                }
-                            }
-                        }
-                        entries.add(new GeoDistanceFacet.Entry(from, to, 0, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY));
-                    }
-                } else {
-                    GeoUtils.parseGeoPoint(parser, point);
-                    fieldName = currentName;
-                }
-            } else if (token == XContentParser.Token.START_OBJECT) {
-                if ("params".equals(currentName)) {
-                    params = parser.map();
-                } else {
-                    // the json in the format of -> field : { lat : 30, lon : 12 }
-                    fieldName = currentName;
-                    GeoUtils.parseGeoPoint(parser, point);
-                }
-            } else if (token.isValue()) {
-                if (currentName.equals("unit")) {
-                    unit = DistanceUnit.fromString(parser.text());
-                } else if (currentName.equals("distance_type") || currentName.equals("distanceType")) {
-                    geoDistance = GeoDistance.fromString(parser.text());
-                } else if ("value_field".equals(currentName) || "valueField".equals(currentName)) {
-                    valueFieldName = parser.text();
-                } else if (ScriptService.VALUE_SCRIPT_INLINE.match(currentName)) {
-                    valueScript = parser.text();
-                    scriptType = ScriptService.ScriptType.INLINE;
-                } else if (ScriptService.VALUE_SCRIPT_ID.match(currentName)) {
-                    valueScript = parser.text();
-                    scriptType = ScriptService.ScriptType.INDEXED;
-                } else if (ScriptService.VALUE_SCRIPT_FILE.match(currentName)) {
-                    valueScript = parser.text();
-                    scriptType = ScriptService.ScriptType.FILE;
-                } else if (ScriptService.SCRIPT_LANG.match(currentName)) {
-                    scriptLang = parser.text();
-                } else if ("normalize".equals(currentName)) {
-                    normalizeLat = parser.booleanValue();
-                    normalizeLon = parser.booleanValue();
-                } else {
-                    // assume the value is the actual value
-                    point.resetFromString(parser.text());
-
-                    fieldName = currentName;
-                }
-            }
-        }
-
-        if (entries.isEmpty()) {
-            throw new FacetPhaseExecutionException(facetName, "no ranges defined for geo_distance facet");
-        }
-
-        if (normalizeLat || normalizeLon) {
-            GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
-        }
-
-        FieldMapper keyFieldMapper = context.smartNameFieldMapper(fieldName);
-        if (keyFieldMapper == null) {
-            throw new FacetPhaseExecutionException(facetName, "failed to find mapping for [" + fieldName + "]");
-        }
-        IndexGeoPointFieldData keyIndexFieldData = context.fieldData().getForField(keyFieldMapper);
-
-        if (valueFieldName != null) {
-            FieldMapper valueFieldMapper = context.smartNameFieldMapper(valueFieldName);
-            if (valueFieldMapper == null) {
-                throw new FacetPhaseExecutionException(facetName, "failed to find mapping for [" + valueFieldName + "]");
-            }
-            IndexNumericFieldData valueIndexFieldData = context.fieldData().getForField(valueFieldMapper);
-            return new ValueGeoDistanceFacetExecutor(keyIndexFieldData, point.lat(), point.lon(), unit, geoDistance, entries.toArray(new GeoDistanceFacet.Entry[entries.size()]),
-                    context, valueIndexFieldData);
-        }
-
-        if (valueScript != null) {
-            return new ScriptGeoDistanceFacetExecutor(keyIndexFieldData, point.lat(), point.lon(), unit, geoDistance, entries.toArray(new GeoDistanceFacet.Entry[entries.size()]),
-                    context, scriptLang, valueScript, scriptType, params);
-        }
-
-        return new GeoDistanceFacetExecutor(keyIndexFieldData, point.lat(), point.lon(), unit, geoDistance, entries.toArray(new GeoDistanceFacet.Entry[entries.size()]),
-                context);
-    }
-}

+ 0 - 180
src/main/java/org/elasticsearch/search/facet/geodistance/InternalGeoDistanceFacet.java

@@ -1,180 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.geodistance;
-
-import com.google.common.collect.ImmutableList;
-
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.bytes.HashedBytesArray;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
-import org.elasticsearch.search.facet.Facet;
-import org.elasticsearch.search.facet.InternalFacet;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- *
- */
-public class InternalGeoDistanceFacet extends InternalFacet implements GeoDistanceFacet {
-
-    private static final BytesReference STREAM_TYPE = new HashedBytesArray(Strings.toUTF8Bytes("geoDistance"));
-
-    public static void registerStreams() {
-        Streams.registerStream(STREAM, STREAM_TYPE);
-    }
-
-    static Stream STREAM = new Stream() {
-        @Override
-        public Facet readFacet(StreamInput in) throws IOException {
-            return readGeoDistanceFacet(in);
-        }
-    };
-
-    @Override
-    public BytesReference streamType() {
-        return STREAM_TYPE;
-    }
-
-    Entry[] entries;
-
-    InternalGeoDistanceFacet() {
-    }
-
-    public InternalGeoDistanceFacet(String name, Entry[] entries) {
-        super(name);
-        this.entries = entries;
-    }
-
-    @Override
-    public String getType() {
-        return TYPE;
-    }
-
-    @Override
-    public List<Entry> getEntries() {
-        return ImmutableList.copyOf(entries);
-    }
-
-    @Override
-    public Iterator<Entry> iterator() {
-        return getEntries().iterator();
-    }
-
-    public Facet reduce(ReduceContext context) {
-        List<Facet> facets = context.facets();
-        if (facets.size() == 1) {
-            return facets.get(0);
-        }
-        InternalGeoDistanceFacet agg = (InternalGeoDistanceFacet) facets.get(0);
-        for (int i = 1; i < facets.size(); i++) {
-            InternalGeoDistanceFacet geoDistanceFacet = (InternalGeoDistanceFacet) facets.get(i);
-            for (int j = 0; j < geoDistanceFacet.entries.length; j++) {
-                GeoDistanceFacet.Entry aggEntry = agg.entries[j];
-                GeoDistanceFacet.Entry currentEntry = geoDistanceFacet.entries[j];
-                aggEntry.count += currentEntry.count;
-                aggEntry.totalCount += currentEntry.totalCount;
-                aggEntry.total += currentEntry.total;
-                if (currentEntry.min < aggEntry.min) {
-                    aggEntry.min = currentEntry.min;
-                }
-                if (currentEntry.max > aggEntry.max) {
-                    aggEntry.max = currentEntry.max;
-                }
-            }
-        }
-        return agg;
-    }
-
-    public static InternalGeoDistanceFacet readGeoDistanceFacet(StreamInput in) throws IOException {
-        InternalGeoDistanceFacet facet = new InternalGeoDistanceFacet();
-        facet.readFrom(in);
-        return facet;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        super.readFrom(in);
-        entries = new Entry[in.readVInt()];
-        for (int i = 0; i < entries.length; i++) {
-            entries[i] = new Entry(in.readDouble(), in.readDouble(), in.readVLong(), in.readVLong(), in.readDouble(), in.readDouble(), in.readDouble());
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        super.writeTo(out);
-        out.writeVInt(entries.length);
-        for (Entry entry : entries) {
-            out.writeDouble(entry.from);
-            out.writeDouble(entry.to);
-            out.writeVLong(entry.count);
-            out.writeVLong(entry.totalCount);
-            out.writeDouble(entry.total);
-            out.writeDouble(entry.min);
-            out.writeDouble(entry.max);
-        }
-    }
-
-
-    static final class Fields {
-        static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
-        static final XContentBuilderString RANGES = new XContentBuilderString("ranges");
-        static final XContentBuilderString FROM = new XContentBuilderString("from");
-        static final XContentBuilderString TO = new XContentBuilderString("to");
-        static final XContentBuilderString COUNT = new XContentBuilderString("count");
-        static final XContentBuilderString TOTAL_COUNT = new XContentBuilderString("total_count");
-        static final XContentBuilderString TOTAL = new XContentBuilderString("total");
-        static final XContentBuilderString MEAN = new XContentBuilderString("mean");
-        static final XContentBuilderString MIN = new XContentBuilderString("min");
-        static final XContentBuilderString MAX = new XContentBuilderString("max");
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject(getName());
-        builder.field(Fields._TYPE, GeoDistanceFacet.TYPE);
-        builder.startArray(Fields.RANGES);
-        for (Entry entry : entries) {
-            builder.startObject();
-            if (!Double.isInfinite(entry.from)) {
-                builder.field(Fields.FROM, entry.from);
-            }
-            if (!Double.isInfinite(entry.to)) {
-                builder.field(Fields.TO, entry.to);
-            }
-            builder.field(Fields.COUNT, entry.getCount());
-            builder.field(Fields.MIN, entry.getMin());
-            builder.field(Fields.MAX, entry.getMax());
-            builder.field(Fields.TOTAL_COUNT, entry.getTotalCount());
-            builder.field(Fields.TOTAL, entry.getTotal());
-            builder.field(Fields.MEAN, entry.getMean());
-            builder.endObject();
-        }
-        builder.endArray();
-        builder.endObject();
-        return builder;
-    }
-}

+ 0 - 105
src/main/java/org/elasticsearch/search/facet/geodistance/ScriptGeoDistanceFacetExecutor.java

@@ -1,105 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.geodistance;
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.search.Scorer;
-import org.elasticsearch.common.geo.GeoDistance;
-import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.script.SearchScript;
-import org.elasticsearch.search.internal.SearchContext;
-
-/**
- *
- */
-public class ScriptGeoDistanceFacetExecutor extends GeoDistanceFacetExecutor {
-
-    private final SearchScript script;
-
-    public ScriptGeoDistanceFacetExecutor(IndexGeoPointFieldData indexFieldData, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
-                                          GeoDistanceFacet.Entry[] entries, SearchContext context,
-                                          String scriptLang, String script, ScriptService.ScriptType scriptType, Map<String, Object> params) {
-        super(indexFieldData, lat, lon, unit, geoDistance, entries, context);
-        this.script = context.scriptService().search(context.lookup(), scriptLang, script, scriptType, params);
-    }
-
-    @Override
-    public Collector collector() {
-        return new Collector(new ScriptAggregator(fixedSourceDistance, entries, script));
-    }
-
-    class Collector extends GeoDistanceFacetExecutor.Collector {
-
-
-        private ScriptAggregator scriptAggregator;
-
-        Collector(ScriptAggregator aggregator) {
-            super(aggregator);
-            this.scriptAggregator = aggregator;
-        }
-
-        @Override
-        public void setScorer(Scorer scorer) throws IOException {
-            script.setScorer(scorer);
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            super.setNextReader(context);
-            script.setNextReader(context);
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-            script.setNextDocId(doc);
-            scriptAggregator.scriptValue = script.runAsDouble();
-            super.collect(doc);
-        }
-    }
-
-    public final static class ScriptAggregator extends GeoDistanceFacetExecutor.Aggregator{
-
-        private double scriptValue;
-
-        public ScriptAggregator(GeoDistance.FixedSourceDistance fixedSourceDistance, GeoDistanceFacet.Entry[] entries, SearchScript script) {
-            super(fixedSourceDistance, entries);
-        }
-        
-        @Override
-        protected void collectGeoPoint(GeoDistanceFacet.Entry entry, int docId, double distance) {
-            final double scriptValue = this.scriptValue;
-            entry.count++;
-            entry.totalCount++;
-            entry.total += scriptValue;
-            if (scriptValue < entry.min) {
-                entry.min = scriptValue;
-            }
-            if (scriptValue > entry.max) {
-                entry.max = scriptValue;
-            }
-        }
-       
-    }
-}

+ 0 - 92
src/main/java/org/elasticsearch/search/facet/geodistance/ValueGeoDistanceFacetExecutor.java

@@ -1,92 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.geodistance;
-
-import org.apache.lucene.index.AtomicReaderContext;
-import org.elasticsearch.common.geo.GeoDistance;
-import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.index.fielddata.IndexNumericFieldData;
-import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.io.IOException;
-
-/**
- *
- */
-public class ValueGeoDistanceFacetExecutor extends GeoDistanceFacetExecutor {
-
-    private final IndexNumericFieldData valueIndexFieldData;
-
-    public ValueGeoDistanceFacetExecutor(IndexGeoPointFieldData indexFieldData, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
-                                         GeoDistanceFacet.Entry[] entries, SearchContext context, IndexNumericFieldData valueIndexFieldData) {
-        super(indexFieldData, lat, lon, unit, geoDistance, entries, context);
-        this.valueIndexFieldData = valueIndexFieldData;
-    }
-
-    @Override
-    public Collector collector() {
-        return new Collector(new Aggregator(fixedSourceDistance, entries));
-    }
-
-    class Collector extends GeoDistanceFacetExecutor.Collector {
-
-        Collector(Aggregator aggregator) {
-            super(aggregator);
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            super.setNextReader(context);
-            ((Aggregator) this.aggregator).valueValues = valueIndexFieldData.load(context).getDoubleValues();
-        }
-    }
-
-    public static class Aggregator extends  GeoDistanceFacetExecutor.Aggregator {
-
-        SortedNumericDoubleValues valueValues;
-
-        public Aggregator(GeoDistance.FixedSourceDistance fixedSourceDistance, GeoDistanceFacet.Entry[] entries) {
-            super(fixedSourceDistance, entries);
-        }
-
-        
-        @Override
-        protected void collectGeoPoint(GeoDistanceFacet.Entry entry, int docId, double distance) {
-            entry.foundInDoc = true;
-            entry.count++;
-            valueValues.setDocument(docId);
-            int seek = valueValues.count();
-            for (int i = 0; i < seek; i++) {
-                double value = valueValues.valueAt(i);
-                entry.totalCount++;
-                entry.total += value;
-                if (value < entry.min) {
-                    entry.min = value;
-                }
-                if (value > entry.max) {
-                    entry.max = value;
-                }
-            }
-        }
-       
-    }
-}

+ 0 - 122
src/main/java/org/elasticsearch/search/facet/histogram/CountHistogramFacetExecutor.java

@@ -1,122 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet.histogram;
-
-import com.carrotsearch.hppc.LongLongOpenHashMap;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.elasticsearch.common.recycler.Recycler;
-import org.elasticsearch.index.fielddata.IndexNumericFieldData;
-import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
-import org.elasticsearch.search.facet.DoubleFacetAggregatorBase;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.InternalFacet;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.io.IOException;
-
-/**
- * A histogram facet collector that uses the same field as the key as well as the
- * value.
- */
-public class CountHistogramFacetExecutor extends FacetExecutor {
-
-    private final IndexNumericFieldData indexFieldData;
-    private final HistogramFacet.ComparatorType comparatorType;
-    final long interval;
-
-    final Recycler.V<LongLongOpenHashMap> counts;
-
-    public CountHistogramFacetExecutor(IndexNumericFieldData indexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
-        this.comparatorType = comparatorType;
-        this.indexFieldData = indexFieldData;
-        this.interval = interval;
-
-        this.counts = context.cacheRecycler().longLongMap(-1);
-    }
-
-    @Override
-    public Collector collector() {
-        return new Collector();
-    }
-
-    @Override
-    public InternalFacet buildFacet(String facetName) {
-        InternalCountHistogramFacet.CountEntry[] entries = new InternalCountHistogramFacet.CountEntry[counts.v().size()];
-        final boolean[] states = counts.v().allocated;
-        final long[] keys = counts.v().keys;
-        final long[] values = counts.v().values;
-        int entryIndex = 0;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
-                entries[entryIndex++] = new InternalCountHistogramFacet.CountEntry(keys[i], values[i]);
-            }
-        }
-        counts.close();
-        return new InternalCountHistogramFacet(facetName, comparatorType, entries);
-    }
-
-    public static long bucket(double value, long interval) {
-        return (((long) (value / interval)) * interval);
-    }
-
-    class Collector extends FacetExecutor.Collector {
-
-        private final HistogramProc histoProc;
-        private SortedNumericDoubleValues values;
-
-        public Collector() {
-            histoProc = new HistogramProc(interval, counts.v());
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            values = indexFieldData.load(context).getDoubleValues();
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-            histoProc.onDoc(doc, values);
-        }
-
-        @Override
-        public void postCollection() {
-        }
-    }
-
-    public final static class HistogramProc extends DoubleFacetAggregatorBase {
-
-        private final long interval;
-        private final LongLongOpenHashMap counts;
-
-        public HistogramProc(long interval, LongLongOpenHashMap counts) {
-            this.interval = interval;
-            this.counts = counts;
-        }
-
-        @Override
-        public void onValue(int docId, double value) {
-            long bucket = bucket(value, interval);
-            counts.addTo(bucket, 1);
-        }
-
-        public LongLongOpenHashMap counts() {
-            return counts;
-        }
-    }
-}

+ 0 - 133
src/main/java/org/elasticsearch/search/facet/histogram/FullHistogramFacetExecutor.java

@@ -1,133 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.histogram;
-
-import com.carrotsearch.hppc.LongObjectOpenHashMap;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.elasticsearch.common.recycler.Recycler;
-import org.elasticsearch.index.fielddata.IndexNumericFieldData;
-import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
-import org.elasticsearch.search.facet.DoubleFacetAggregatorBase;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.InternalFacet;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * A histogram facet collector that uses the same field as the key as well as the
- * value.
- */
-public class FullHistogramFacetExecutor extends FacetExecutor {
-
-    private final IndexNumericFieldData indexFieldData;
-    private final HistogramFacet.ComparatorType comparatorType;
-    final long interval;
-
-    final Recycler.V<LongObjectOpenHashMap<InternalFullHistogramFacet.FullEntry>> entries;
-
-    public FullHistogramFacetExecutor(IndexNumericFieldData indexFieldData, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
-        this.comparatorType = comparatorType;
-        this.indexFieldData = indexFieldData;
-        this.interval = interval;
-
-        this.entries = context.cacheRecycler().longObjectMap(-1);
-    }
-
-    @Override
-    public Collector collector() {
-        return new Collector();
-    }
-
-    @Override
-    public InternalFacet buildFacet(String facetName) {
-        List<InternalFullHistogramFacet.FullEntry> fullEntries = new ArrayList<>(entries.v().size());
-        boolean[] states = entries.v().allocated;
-        Object[] values = entries.v().values;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
-                fullEntries.add((InternalFullHistogramFacet.FullEntry) values[i]);
-            }
-        }
-        entries.close();
-        return new InternalFullHistogramFacet(facetName, comparatorType, fullEntries);
-    }
-
-    public static long bucket(double value, long interval) {
-        return (((long) (value / interval)) * interval);
-    }
-
-    class Collector extends FacetExecutor.Collector {
-
-        private final HistogramProc histoProc;
-        private SortedNumericDoubleValues values;
-
-        Collector() {
-            this.histoProc = new HistogramProc(interval, entries.v());
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            values = indexFieldData.load(context).getDoubleValues();
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-            histoProc.onDoc(doc, values);
-        }
-
-        @Override
-        public void postCollection() {
-        }
-    }
-
-    public final static class HistogramProc extends DoubleFacetAggregatorBase {
-
-        final long interval;
-        final LongObjectOpenHashMap<InternalFullHistogramFacet.FullEntry> entries;
-
-        public HistogramProc(long interval, LongObjectOpenHashMap<InternalFullHistogramFacet.FullEntry> entries) {
-            this.interval = interval;
-            this.entries = entries;
-        }
-
-        @Override
-        public void onValue(int docId, double value) {
-            long bucket = bucket(value, interval);
-            InternalFullHistogramFacet.FullEntry entry = entries.get(bucket);
-            if (entry == null) {
-                entry = new InternalFullHistogramFacet.FullEntry(bucket, 1, value, value, 1, value);
-                entries.put(bucket, entry);
-            } else {
-                entry.count++;
-                entry.totalCount++;
-                entry.total += value;
-                if (value < entry.min) {
-                    entry.min = value;
-                }
-                if (value > entry.max) {
-                    entry.max = value;
-                }
-            }
-        }
-    }
-}

+ 0 - 180
src/main/java/org/elasticsearch/search/facet/histogram/HistogramFacet.java

@@ -1,180 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.histogram;
-
-import org.elasticsearch.ElasticsearchIllegalArgumentException;
-import org.elasticsearch.search.facet.Facet;
-
-import java.util.Comparator;
-import java.util.List;
-
-/**
- * Numeric histogram facet.
- */
-public interface HistogramFacet extends Facet, Iterable<HistogramFacet.Entry> {
-
-    /**
-     * The type of the filter facet.
-     */
-    public static final String TYPE = "histogram";
-
-    /**
-     * An ordered list of histogram facet entries.
-     */
-    List<? extends Entry> getEntries();
-
-    public static enum ComparatorType {
-        KEY((byte) 0, "key", new Comparator<Entry>() {
-
-            @Override
-            public int compare(Entry o1, Entry o2) {
-                // push nulls to the end
-                if (o1 == null) {
-                    if (o2 == null) {
-                        return 0;
-                    }
-                    return 1;
-                }
-                if (o2 == null) {
-                    return -1;
-                }
-                return (o1.getKey() < o2.getKey() ? -1 : (o1.getKey() == o2.getKey() ? 0 : 1));
-            }
-        }),
-        COUNT((byte) 1, "count", new Comparator<Entry>() {
-
-            @Override
-            public int compare(Entry o1, Entry o2) {
-                // push nulls to the end
-                if (o1 == null) {
-                    if (o2 == null) {
-                        return 0;
-                    }
-                    return 1;
-                }
-                if (o2 == null) {
-                    return -1;
-                }
-                return (o1.getCount() < o2.getCount() ? -1 : (o1.getCount() == o2.getCount() ? 0 : 1));
-            }
-        }),
-        TOTAL((byte) 2, "total", new Comparator<Entry>() {
-
-            @Override
-            public int compare(Entry o1, Entry o2) {
-                // push nulls to the end
-                if (o1 == null) {
-                    if (o2 == null) {
-                        return 0;
-                    }
-                    return 1;
-                }
-                if (o2 == null) {
-                    return -1;
-                }
-                return (o1.getTotal() < o2.getTotal() ? -1 : (o1.getTotal() == o2.getTotal() ? 0 : 1));
-            }
-        });
-
-        private final byte id;
-
-        private final String description;
-
-        private final Comparator<Entry> comparator;
-
-        ComparatorType(byte id, String description, Comparator<Entry> comparator) {
-            this.id = id;
-            this.description = description;
-            this.comparator = comparator;
-        }
-
-        public byte id() {
-            return this.id;
-        }
-
-        public String description() {
-            return this.description;
-        }
-
-        public Comparator<Entry> comparator() {
-            return comparator;
-        }
-
-        public static ComparatorType fromId(byte id) {
-            if (id == 0) {
-                return KEY;
-            } else if (id == 1) {
-                return COUNT;
-            } else if (id == 2) {
-                return TOTAL;
-            }
-            throw new ElasticsearchIllegalArgumentException("No type argument match for histogram comparator [" + id + "]");
-        }
-
-        public static ComparatorType fromString(String type) {
-            if ("key".equals(type)) {
-                return KEY;
-            } else if ("count".equals(type)) {
-                return COUNT;
-            } else if ("total".equals(type)) {
-                return TOTAL;
-            }
-            throw new ElasticsearchIllegalArgumentException("No type argument match for histogram comparator [" + type + "]");
-        }
-    }
-
-    public interface Entry {
-
-        /**
-         * The key value of the histogram.
-         */
-        long getKey();
-
-        /**
-         * The number of hits that fall within that key "range" or "interval".
-         */
-        long getCount();
-
-        /**
-         * The total count of values aggregated to compute the total.
-         */
-        long getTotalCount();
-
-        /**
-         * The sum / total of the value field that fall within this key "interval".
-         */
-        double getTotal();
-
-        /**
-         * The mean of this facet interval.
-         */
-        double getMean();
-
-        /**
-         * The minimum value.
-         */
-        double getMin();
-
-        /**
-         * The maximum value.
-         */
-        double getMax();
-    }
-}

+ 0 - 151
src/main/java/org/elasticsearch/search/facet/histogram/HistogramFacetBuilder.java

@@ -1,151 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.facet.histogram;
-
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.query.FilterBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilderException;
-import org.elasticsearch.search.facet.FacetBuilder;
-
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-
-/**
- * A facet builder of histogram facets.
- */
-public class HistogramFacetBuilder extends FacetBuilder {
-    private String keyFieldName;
-    private String valueFieldName;
-    private long interval = -1;
-    private HistogramFacet.ComparatorType comparatorType;
-
-    /**
-     * Constructs a new histogram facet with the provided facet logical name.
-     *
-     * @param name The logical name of the facet
-     */
-    public HistogramFacetBuilder(String name) {
-        super(name);
-    }
-
-    /**
-     * The field name to perform the histogram facet. Translates to perform the histogram facet
-     * using the provided field as both the {@link #keyField(String)} and {@link #valueField(String)}.
-     */
-    public HistogramFacetBuilder field(String field) {
-        this.keyFieldName = field;
-        return this;
-    }
-
-    /**
-     * The field name to use in order to control where the hit will "fall into" within the histogram
-     * entries. Essentially, using the key field numeric value, the hit will be "rounded" into the relevant
-     * bucket controlled by the interval.
-     */
-    public HistogramFacetBuilder keyField(String keyField) {
-        this.keyFieldName = keyField;
-        return this;
-    }
-
-    /**
-     * The field name to use as the value of the hit to compute data based on values within the interval
-     * (for example, total).
-     */
-    public HistogramFacetBuilder valueField(String valueField) {
-        this.valueFieldName = valueField;
-        return this;
-    }
-
-    /**
-     * The interval used to control the bucket "size" where each key value of a hit will fall into.
-     */
-    public HistogramFacetBuilder interval(long interval) {
-        this.interval = interval;
-        return this;
-    }
-
-    /**
-     * The interval used to control the bucket "size" where each key value of a hit will fall into.
-     */
-    public HistogramFacetBuilder interval(long interval, TimeUnit unit) {
-        return interval(unit.toMillis(interval));
-    }
-
-    public HistogramFacetBuilder comparator(HistogramFacet.ComparatorType comparatorType) {
-        this.comparatorType = comparatorType;
-        return this;
-    }
-
-    /**
-     * Should the facet run in global mode (not bounded by the search query) or not (bounded by
-     * the search query). Defaults to <tt>false</tt>.
-     */
-    public HistogramFacetBuilder global(boolean global) {
-        super.global(global);
-        return this;
-    }
-
-    /**
-     * An additional filter used to further filter down the set of documents the facet will run on.
-     */
-    public HistogramFacetBuilder facetFilter(FilterBuilder filter) {
-        this.facetFilter = filter;
-        return this;
-    }
-
-    /**
-     * Sets the nested path the facet will execute on. A match (root object) will then cause all the
-     * nested objects matching the path to be computed into the facet.
-     */
-    public HistogramFacetBuilder nested(String nested) {
-        this.nested = nested;
-        return this;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        if (keyFieldName == null) {
-            throw new SearchSourceBuilderException("field must be set on histogram facet for facet [" + name + "]");
-        }
-        if (interval < 0) {
-            throw new SearchSourceBuilderException("interval must be set on histogram facet for facet [" + name + "]");
-        }
-        builder.startObject(name);
-
-        builder.startObject(HistogramFacet.TYPE);
-        if (valueFieldName != null) {
-            builder.field("key_field", keyFieldName);
-            builder.field("value_field", valueFieldName);
-        } else {
-            builder.field("field", keyFieldName);
-        }
-        builder.field("interval", interval);
-
-        if (comparatorType != null) {
-            builder.field("comparator", comparatorType.description());
-        }
-        builder.endObject();
-
-        addFilterFacetAndGlobal(builder, params);
-
-        builder.endObject();
-        return builder;
-    }
-}

+ 0 - 159
src/main/java/org/elasticsearch/search/facet/histogram/HistogramFacetParser.java

@@ -1,159 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.facet.histogram;
-
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.fielddata.IndexNumericFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.FacetParser;
-import org.elasticsearch.search.facet.FacetPhaseExecutionException;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- *
- */
-public class HistogramFacetParser extends AbstractComponent implements FacetParser {
-
-    @Inject
-    public HistogramFacetParser(Settings settings) {
-        super(settings);
-        InternalHistogramFacet.registerStreams();
-    }
-
-    @Override
-    public String[] types() {
-        return new String[]{HistogramFacet.TYPE};
-    }
-
-    @Override
-    public FacetExecutor.Mode defaultMainMode() {
-        return FacetExecutor.Mode.COLLECTOR;
-    }
-
-    @Override
-    public FacetExecutor.Mode defaultGlobalMode() {
-        return FacetExecutor.Mode.COLLECTOR;
-    }
-
-    @Override
-    public FacetExecutor parse(String facetName, XContentParser parser, SearchContext context) throws IOException {
-        String keyField = null;
-        String valueField = null;
-        String keyScript = null;
-        String valueScript = null;
-        String scriptLang = null;
-        Map<String, Object> params = null;
-        long interval = 0;
-        HistogramFacet.ComparatorType comparatorType = HistogramFacet.ComparatorType.KEY;
-        XContentParser.Token token;
-        String fieldName = null;
-        ScriptService.ScriptType valueScriptType = null;
-        ScriptService.ScriptType keyScriptType = null;
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token == XContentParser.Token.FIELD_NAME) {
-                fieldName = parser.currentName();
-            } else if (token == XContentParser.Token.START_OBJECT) {
-                if ("params".equals(fieldName)) {
-                    params = parser.map();
-                }
-            } else if (token.isValue()) {
-                if ("field".equals(fieldName)) {
-                    keyField = parser.text();
-                } else if ("key_field".equals(fieldName) || "keyField".equals(fieldName)) {
-                    keyField = parser.text();
-                } else if ("value_field".equals(fieldName) || "valueField".equals(fieldName)) {
-                    valueField = parser.text();
-                } else if ("interval".equals(fieldName)) {
-                    interval = parser.longValue();
-                } else if ("time_interval".equals(fieldName) || "timeInterval".equals(fieldName)) {
-                    interval = TimeValue.parseTimeValue(parser.text(), null).millis();
-                } else if ("order".equals(fieldName) || "comparator".equals(fieldName)) {
-                    comparatorType = HistogramFacet.ComparatorType.fromString(parser.text());
-                } else if (ScriptService.KEY_SCRIPT_INLINE.match(fieldName)) {
-                    keyScript = parser.text();
-                    keyScriptType = ScriptService.ScriptType.INLINE;
-                } else if (ScriptService.KEY_SCRIPT_ID.match(fieldName)) {
-                    keyScript = parser.text();
-                    keyScriptType = ScriptService.ScriptType.INDEXED;
-                } else if (ScriptService.KEY_SCRIPT_FILE.match(fieldName)) {
-                    keyScript = parser.text();
-                    keyScriptType = ScriptService.ScriptType.FILE;
-                } else if (ScriptService.VALUE_SCRIPT_INLINE.match(fieldName)) {
-                    valueScript = parser.text();
-                    valueScriptType = ScriptService.ScriptType.INLINE;
-                } else if (ScriptService.VALUE_SCRIPT_ID.match(fieldName)) {
-                    valueScript = parser.text();
-                    valueScriptType = ScriptService.ScriptType.INDEXED;
-                } else if (ScriptService.VALUE_SCRIPT_FILE.match(fieldName)) {
-                    valueScript = parser.text();
-                    valueScriptType = ScriptService.ScriptType.FILE;
-                } else if (ScriptService.SCRIPT_LANG.match(fieldName)) {
-                    scriptLang = parser.text();
-                }
-            }
-        }
-
-        if (keyScript != null && valueScript != null) {
-            return new ScriptHistogramFacetExecutor(scriptLang, keyScript, keyScriptType, valueScript, valueScriptType, params, interval, comparatorType, context);
-        }
-
-        if (keyField == null) {
-            throw new FacetPhaseExecutionException(facetName, "key field is required to be set for histogram facet, either using [field] or using [key_field]");
-        }
-
-        if (interval <= 0) {
-            throw new FacetPhaseExecutionException(facetName, "[interval] is required to be set for histogram facet");
-        }
-
-        FieldMapper keyMapper = context.smartNameFieldMapper(keyField);
-        if (keyMapper == null) {
-            throw new FacetPhaseExecutionException(facetName, "No mapping found for key_field [" + keyField + "]");
-        }
-        IndexNumericFieldData keyIndexFieldData = context.fieldData().getForField(keyMapper);
-
-        IndexNumericFieldData valueIndexFieldData = null;
-        if (valueField != null) {
-            FieldMapper valueMapper = context.smartNameFieldMapper(valueField);
-            if (valueMapper == null) {
-                throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + valueField + "]");
-            }
-            valueIndexFieldData = context.fieldData().getForField(valueMapper);
-        }
-
-        if (valueScript != null) {
-            return new ValueScriptHistogramFacetExecutor(keyIndexFieldData, scriptLang, valueScript, valueScriptType, params, interval, comparatorType, context);
-        } else if (valueField == null) {
-            return new CountHistogramFacetExecutor(keyIndexFieldData, interval, comparatorType, context);
-        } else if (keyField.equals(valueField)) {
-            return new FullHistogramFacetExecutor(keyIndexFieldData, interval, comparatorType, context);
-        } else {
-            // we have a value field, and its different than the key
-            return new ValueHistogramFacetExecutor(keyIndexFieldData, valueIndexFieldData, interval, comparatorType, context);
-        }
-    }
-}

部分文件因为文件数量过多而无法显示