@@ -12,6 +12,7 @@ and a list of `processors`:
"processors" : [ ... ]
}
--------------------------------------------------
+// NOTCONSOLE

The `description` is a special field to store a helpful description of
what the pipeline does.
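
For illustration only, a complete minimal pipeline using both fields might look like this (the processor and field names below are invented for the example):

[source,js]
--------------------------------------------------
{
  "description" : "rename the 'src_ip' field to 'client_ip'",
  "processors" : [
    {
      "rename" : {
        "field" : "src_ip",
        "target_field" : "client_ip"
      }
    }
  ]
}
--------------------------------------------------
// NOTCONSOLE
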
@@ -126,6 +127,7 @@ using `filter_path` to limit the response to just the `version`:
--------------------------------------------------
GET /_ingest/pipeline/my-pipeline-id?filter_path=*.version
--------------------------------------------------
+// CONSOLE
// TEST[continued]

This should give a small response that makes it both easy and inexpensive to parse:
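
For reference, the filtered response then has roughly this shape (the version number shown here is only illustrative):

[source,js]
--------------------------------------------------
{
  "my-pipeline-id" : {
    "version" : 11
  }
}
--------------------------------------------------
// NOTCONSOLE
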
@@ -209,6 +211,7 @@ POST _ingest/pipeline/_simulate
]
}
--------------------------------------------------
+// NOTCONSOLE

Here is the structure of a simulate request against an existing pipeline:

@@ -223,7 +226,7 @@ POST _ingest/pipeline/my-pipeline-id/_simulate
]
}
--------------------------------------------------
-
+// NOTCONSOLE

Here is an example of a simulate request with a pipeline defined in the request
and its response:
@@ -275,42 +278,36 @@ Response:
{
"doc": {
"_id": "id",
- "_ttl": null,
- "_parent": null,
"_index": "index",
- "_routing": null,
"_type": "type",
- "_timestamp": null,
"_source": {
"field2": "_value",
"foo": "bar"
},
"_ingest": {
- "timestamp": "2016-01-04T23:53:27.186+0000"
+ "timestamp": "2017-05-04T22:30:03.187Z"
}
}
},
{
"doc": {
"_id": "id",
- "_ttl": null,
- "_parent": null,
"_index": "index",
- "_routing": null,
"_type": "type",
- "_timestamp": null,
"_source": {
"field2": "_value",
"foo": "rab"
},
"_ingest": {
- "timestamp": "2016-01-04T23:53:27.186+0000"
+ "timestamp": "2017-05-04T22:30:03.188Z"
}
}
}
]
}
--------------------------------------------------
+// TESTRESPONSE[s/"2017-05-04T22:30:03.187Z"/$body.docs.0.doc._ingest.timestamp/]
+// TESTRESPONSE[s/"2017-05-04T22:30:03.188Z"/$body.docs.1.doc._ingest.timestamp/]

[[ingest-verbose-param]]
==== Viewing Verbose Results
@@ -374,41 +371,31 @@ Response:
{
"processor_results": [
{
- "tag": "processor[set]-0",
"doc": {
"_id": "id",
- "_ttl": null,
- "_parent": null,
"_index": "index",
- "_routing": null,
"_type": "type",
- "_timestamp": null,
"_source": {
"field2": "_value2",
"foo": "bar"
},
"_ingest": {
- "timestamp": "2016-01-05T00:02:51.383+0000"
+ "timestamp": "2017-05-04T22:46:09.674Z"
}
}
},
{
- "tag": "processor[set]-1",
"doc": {
"_id": "id",
- "_ttl": null,
- "_parent": null,
"_index": "index",
- "_routing": null,
"_type": "type",
- "_timestamp": null,
"_source": {
"field3": "_value3",
"field2": "_value2",
"foo": "bar"
},
"_ingest": {
- "timestamp": "2016-01-05T00:02:51.383+0000"
+ "timestamp": "2017-05-04T22:46:09.675Z"
}
}
}
@@ -417,41 +404,31 @@ Response:
{
"processor_results": [
{
- "tag": "processor[set]-0",
"doc": {
"_id": "id",
- "_ttl": null,
- "_parent": null,
"_index": "index",
- "_routing": null,
"_type": "type",
- "_timestamp": null,
"_source": {
"field2": "_value2",
"foo": "rab"
},
"_ingest": {
- "timestamp": "2016-01-05T00:02:51.384+0000"
+ "timestamp": "2017-05-04T22:46:09.676Z"
}
}
},
{
- "tag": "processor[set]-1",
"doc": {
"_id": "id",
- "_ttl": null,
- "_parent": null,
"_index": "index",
- "_routing": null,
"_type": "type",
- "_timestamp": null,
"_source": {
"field3": "_value3",
"field2": "_value2",
"foo": "rab"
},
"_ingest": {
- "timestamp": "2016-01-05T00:02:51.384+0000"
+ "timestamp": "2017-05-04T22:46:09.677Z"
}
}
}
@@ -460,6 +437,10 @@ Response:
]
}
--------------------------------------------------
+// TESTRESPONSE[s/"2017-05-04T22:46:09.674Z"/$body.docs.0.processor_results.0.doc._ingest.timestamp/]
+// TESTRESPONSE[s/"2017-05-04T22:46:09.675Z"/$body.docs.0.processor_results.1.doc._ingest.timestamp/]
+// TESTRESPONSE[s/"2017-05-04T22:46:09.676Z"/$body.docs.1.processor_results.0.doc._ingest.timestamp/]
+// TESTRESPONSE[s/"2017-05-04T22:46:09.677Z"/$body.docs.1.processor_results.1.doc._ingest.timestamp/]

[[accessing-data-in-pipelines]]
== Accessing Data in Pipelines
@@ -482,6 +463,7 @@ their name. For example:
}
}
--------------------------------------------------
+// NOTCONSOLE

On top of this, fields from the source are always accessible via the `_source` prefix:

@@ -494,6 +476,7 @@ On top of this, fields from the source are always accessible via the `_source` p
}
}
--------------------------------------------------
+// NOTCONSOLE

[float]
[[accessing-metadata-fields]]
@@ -513,6 +496,7 @@ The following example sets the `_id` metadata field of a document to `1`:
}
}
--------------------------------------------------
+// NOTCONSOLE

The following metadata fields are accessible by a processor: `_index`, `_type`, `_id`, `_routing`, `_parent`.
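
Any of these can be written with a `set` processor in the same way as `_id` above. As a sketch (the routing value is arbitrary):

[source,js]
--------------------------------------------------
{
  "set" : {
    "field" : "_routing",
    "value" : "user123"
  }
}
--------------------------------------------------
// NOTCONSOLE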

@@ -538,6 +522,7 @@ The following example adds a field with the name `received`. The value is the in
}
}
--------------------------------------------------
+// NOTCONSOLE

Unlike Elasticsearch metadata fields, the ingest metadata field name `_ingest` can be used as a valid field name
in the source of a document. Use `_source._ingest` to refer to the field in the source document. Otherwise, `_ingest`
@@ -562,6 +547,7 @@ the values of `field_a` and `field_b`.
}
}
--------------------------------------------------
+// NOTCONSOLE

The following example uses the value of the `geoip.country_iso_code` field in the source
to set the index that the document will be indexed into:
@@ -575,6 +561,7 @@ to set the index that the document will be indexed into:
}
}
--------------------------------------------------
+// NOTCONSOLE

[[handling-failure-in-pipelines]]
== Handling Failures in Pipelines
@@ -620,6 +607,7 @@ Elasticsearch.
]
}
--------------------------------------------------
+// NOTCONSOLE

The following example defines an `on_failure` block on a whole pipeline to change
the index to which failed documents get sent.
@@ -639,6 +627,7 @@ the index to which failed documents get sent.
]
}
--------------------------------------------------
+// NOTCONSOLE

Alternatively, instead of defining behaviour in case of processor failure, it is also possible
to ignore a failure and continue with the next processor by specifying the `ignore_failure` setting.
@@ -661,6 +650,7 @@ continues to execute, which in this case means that the pipeline does nothing.
]
}
--------------------------------------------------
+// NOTCONSOLE

The `ignore_failure` setting can be used on any processor and defaults to `false`.
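
As a sketch, explicitly enabling it on a `remove` processor (the field name is only an example) looks like this:

[source,js]
--------------------------------------------------
{
  "remove" : {
    "field" : "foo",
    "ignore_failure" : true
  }
}
--------------------------------------------------
// NOTCONSOLE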

@@ -699,6 +689,7 @@ metadata field to provide the error message.
]
}
--------------------------------------------------
+// NOTCONSOLE

[[ingest-processors]]
== Processors
@@ -713,6 +704,7 @@ All processors are defined in the following way within a pipeline definition:
}
}
--------------------------------------------------
+// NOTCONSOLE

Each processor defines its own configuration parameters, but all processors have
the ability to declare `tag` and `on_failure` fields. These fields are optional.
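
For example, a processor declaring both optional fields might be sketched like this (the tag text, field names, and fallback value are invented for the example):

[source,js]
--------------------------------------------------
{
  "rename" : {
    "tag" : "rename-hostname",
    "field" : "hostname",
    "target_field" : "host_name",
    "on_failure" : [
      {
        "set" : {
          "field" : "error",
          "value" : "hostname field is missing"
        }
      }
    ]
  }
}
--------------------------------------------------
// NOTCONSOLE
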
@@ -765,6 +757,7 @@ Accepts a single value or an array of values.
}
}
--------------------------------------------------
+// NOTCONSOLE

[[convert-processor]]
=== Convert Processor
@@ -802,6 +795,7 @@ such a case, `target_field` will still be updated with the unconverted field val
}
}
--------------------------------------------------
+// NOTCONSOLE

[[date-processor]]
=== Date Processor
@@ -842,6 +836,7 @@ Here is an example that adds the parsed date to the `timestamp` field based on t
]
}
--------------------------------------------------
+// NOTCONSOLE

[[date-index-name-processor]]
=== Date Index Name Processor
@@ -1011,6 +1006,7 @@ to the requester.
}
}
--------------------------------------------------
+// NOTCONSOLE

[[foreach-processor]]
=== Foreach Processor
@@ -1059,6 +1055,7 @@ Assume the following document:
"values" : ["foo", "bar", "baz"]
}
--------------------------------------------------
+// NOTCONSOLE

When this `foreach` processor operates on this sample document:

@@ -1075,6 +1072,7 @@ When this `foreach` processor operates on this sample document:
}
}
--------------------------------------------------
+// NOTCONSOLE

Then the document will look like this after preprocessing:

@@ -1084,6 +1082,7 @@ Then the document will look like this after preprocessing:
"values" : ["FOO", "BAR", "BAZ"]
}
--------------------------------------------------
+// NOTCONSOLE

Let's take a look at another example:

@@ -1102,6 +1101,7 @@ Let's take a look at another example:
]
}
--------------------------------------------------
+// NOTCONSOLE

In this case, the `id` field needs to be removed,
so the following `foreach` processor is used:
@@ -1119,6 +1119,7 @@ so the following `foreach` processor is used:
}
}
--------------------------------------------------
+// NOTCONSOLE

After preprocessing, the result is:

@@ -1135,6 +1136,7 @@ After preprocessing the result is:
]
}
--------------------------------------------------
+// NOTCONSOLE

The wrapped processor can have an `on_failure` definition.
For example, the `id` field may not exist on all person objects.
@@ -1162,6 +1164,7 @@ block to send the document to the 'failure_index' index for later inspection:
}
}
--------------------------------------------------
+// NOTCONSOLE

In this example, if the `remove` processor does fail, then
the array elements that have been processed thus far will
@@ -1210,7 +1213,7 @@ The `TYPE` is the type you wish to cast your named field. `int` and `float` are

For example, you might want to match the following text:

-[source,js]
+[source,txt]
--------------------------------------------------
3.44 55.3.244.1
--------------------------------------------------
@@ -1218,7 +1221,7 @@ For example, you might want to match the following text:
You may know that the message in the example is a number followed by an IP address. You can match this text by using the following
Grok expression.

-[source,js]
+[source,txt]
--------------------------------------------------
%{NUMBER:duration} %{IP:client}
--------------------------------------------------
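
If you also want the number coerced rather than captured as a string, the optional `TYPE` described above can be appended to the semantic, for example (a sketch using the `float` coercion):

[source,txt]
--------------------------------------------------
%{NUMBER:duration:float} %{IP:client}
--------------------------------------------------
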
@@ -1247,10 +1250,11 @@ a document.
"message": "55.3.244.1 GET /index.html 15824 0.043"
}
--------------------------------------------------
+// NOTCONSOLE

The pattern for this could be:

-[source,js]
+[source,txt]
--------------------------------------------------
%{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration}
--------------------------------------------------
@@ -1271,6 +1275,7 @@ Here is an example pipeline for processing the above document by using Grok:
]
}
--------------------------------------------------
+// NOTCONSOLE

This pipeline will insert these named captures as new fields within the document, like so:

@@ -1285,6 +1290,7 @@ This pipeline will insert these named captures as new fields within the document
"duration": "0.043"
}
--------------------------------------------------
+// NOTCONSOLE

[[custom-patterns]]
==== Custom Patterns and Pattern Files
@@ -1313,6 +1319,7 @@ Here is an example of a pipeline specifying custom pattern definitions:
]
}
--------------------------------------------------
+// NOTCONSOLE

[[trace-match]]
==== Providing Multiple Match Patterns
@@ -1472,6 +1479,7 @@ If the field is not a string, the processor will throw an exception.
}
}
--------------------------------------------------
+// NOTCONSOLE

[[join-processor]]
=== Join Processor
@@ -1496,6 +1504,7 @@ Throws an error when the field is not an array.
}
}
--------------------------------------------------
+// NOTCONSOLE

[[json-processor]]
=== JSON Processor
@@ -1522,6 +1531,7 @@ Suppose you provide this configuration of the `json` processor:
}
}
--------------------------------------------------
+// NOTCONSOLE

If the following document is processed:

@@ -1531,6 +1541,7 @@ If the following document is processed:
"string_source": "{\"foo\": 2000}"
}
--------------------------------------------------
+// NOTCONSOLE

after the `json` processor operates on it, it will look like:

@@ -1543,6 +1554,7 @@ after the `json` processor operates on it, it will look like:
}
}
--------------------------------------------------
+// NOTCONSOLE

If the following configuration is provided, omitting the optional `target_field` setting:
[source,js]
@@ -1553,6 +1565,7 @@ If the following configuration is provided, omitting the optional `target_field`
}
}
--------------------------------------------------
+// NOTCONSOLE

then after the `json` processor operates on this document:

@@ -1562,6 +1575,7 @@ then after the `json` processor operates on this document:
"source_and_target": "{\"foo\": 2000}"
}
--------------------------------------------------
+// NOTCONSOLE

it will look like:

@@ -1573,8 +1587,9 @@ it will look like:
}
}
--------------------------------------------------
+// NOTCONSOLE

-This illustrates that, unless it is explicitly named in the processor configuration, the `target_field`
+This illustrates that, unless it is explicitly named in the processor configuration, the `target_field`
is the same field provided in the required `field` configuration.

[[kv-processor]]
@@ -1594,6 +1609,7 @@ For example, if you have a log message which contains `ip=1.2.3.4 error=REFUSED`
}
}
--------------------------------------------------
+// NOTCONSOLE

[[kv-options]]
.Kv Options
@@ -1630,6 +1646,7 @@ Converts a string to its lowercase equivalent.
}
}
--------------------------------------------------
+// NOTCONSOLE

[[remove-processor]]
=== Remove Processor
@@ -1651,6 +1668,7 @@ Removes an existing field. If the field doesn't exist, an exception will be thro
}
}
--------------------------------------------------
+// NOTCONSOLE

[[rename-processor]]
=== Rename Processor
@@ -1675,6 +1693,7 @@ Renames an existing field. If the field doesn't exist or the new name is already
}
}
--------------------------------------------------
+// NOTCONSOLE

[[script-processor]]
=== Script Processor
@@ -1718,6 +1737,7 @@ numeric fields `field_a` and `field_b` multiplied by the parameter param_c:
}
}
--------------------------------------------------
+// NOTCONSOLE


[[set-processor]]
@@ -1744,6 +1764,7 @@ its value will be replaced with the provided one.
}
}
--------------------------------------------------
+// NOTCONSOLE

[[split-processor]]
=== Split Processor
@@ -1768,6 +1789,7 @@ Splits a field into an array using a separator character. Only works on string f
}
}
--------------------------------------------------
+// NOTCONSOLE
<1> Treat all consecutive whitespace characters as a single separator
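
As a rough sketch of the effect, assuming the processor is configured on a field named `my_field`, a document like this:

[source,js]
--------------------------------------------------
{
  "my_field" : "foo   bar baz"
}
--------------------------------------------------
// NOTCONSOLE

would leave the processor with the value split into an array:

[source,js]
--------------------------------------------------
{
  "my_field" : ["foo", "bar", "baz"]
}
--------------------------------------------------
// NOTCONSOLE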

[[sort-processor]]
@@ -1794,6 +1816,7 @@ Throws an error when the field is not an array.
}
}
--------------------------------------------------
+// NOTCONSOLE

[[trim-processor]]
=== Trim Processor
@@ -1818,6 +1841,7 @@ NOTE: This only works on leading and trailing whitespace.
}
}
--------------------------------------------------
+// NOTCONSOLE

[[uppercase-processor]]
=== Uppercase Processor
@@ -1840,6 +1864,7 @@ Converts a string to its uppercase equivalent.
}
}
--------------------------------------------------
+// NOTCONSOLE

[[dot-expand-processor]]
=== Dot Expander Processor
@@ -1865,6 +1890,7 @@ Otherwise these <<accessing-data-in-pipelines,fields>> can't be accessed by any
}
}
--------------------------------------------------
+// NOTCONSOLE

For example, the `dot_expander` processor would turn this document:

@@ -1874,6 +1900,7 @@ For example the dot expand processor would turn this document:
"foo.bar" : "value"
}
--------------------------------------------------
+// NOTCONSOLE

into:

@@ -1885,6 +1912,7 @@ into:
}
}
--------------------------------------------------
+// NOTCONSOLE

If there is already a `bar` field nested under `foo` then
this processor merges the `foo.bar` field into it. If the field is
@@ -1901,6 +1929,7 @@ For example, the following document:
}
}
--------------------------------------------------
+// NOTCONSOLE

is transformed by the `dot_expander` processor into:

@@ -1912,6 +1941,7 @@ is transformed by the `dot_expander` processor into:
}
}
--------------------------------------------------
+// NOTCONSOLE

If any field outside of the leaf field conflicts with a pre-existing field of the same name,
then that field needs to be renamed first.
@@ -1925,6 +1955,7 @@ Consider the following document:
"foo.bar": "value2"
}
--------------------------------------------------
+// NOTCONSOLE

Then the `foo` field needs to be renamed first before the `dot_expander`
processor is applied. So in order for the `foo.bar` field to properly
@@ -1949,6 +1980,7 @@ pipeline should be used:
]
}
--------------------------------------------------
+// NOTCONSOLE

The reason for this is that Ingest doesn't know how to automatically cast
a scalar field to an object field.