
Align JSON logs better with ECS (#67266)

The JSON logs that Elasticsearch produces are roughly in an ECS shape. This PR improves
that alignment.
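
For reviewers, the main renames in one place. This is a summary gathered from the hunks below, not an exhaustive ECS mapping; `ecs.version` is also bumped from 1.6 to 1.7.

```java
import java.util.Map;

// Old field/attribute names on the left, ECS-aligned names on the right,
// as applied across the log4j2.properties files, layouts and tests below.
public final class EcsRenameSummary {
    public static final Map<String, String> RENAMES = Map.of(
        "type_name", "dataset",                                    // ECSJsonLayout attribute
        "type", "event.dataset",                                   // per-line log field
        "cluster.name", "elasticsearch.cluster.name",
        "cluster.uuid", "elasticsearch.cluster.uuid",
        "node.name", "elasticsearch.node.name",
        "node.id", "elasticsearch.node.id",
        "key", "event.code",                                       // deprecation message key
        "x-opaque-id", "elasticsearch.http.request.x_opaque_id",
        "deprecation.elasticsearch", "elasticsearch.deprecation"   // data_stream.dataset value change, not a field rename
    );
}
```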
Rory Hunter · 4 years ago · commit c841b2c073

+ 4 - 4
distribution/docker/src/docker/config/oss/log4j2.properties

@@ -3,7 +3,7 @@ status = error
 appender.rolling.type = Console
 appender.rolling.name = rolling
 appender.rolling.layout.type = ECSJsonLayout
-appender.rolling.layout.type_name = server
+appender.rolling.layout.dataset = elasticsearch.server
 
 rootLogger.level = info
 rootLogger.appenderRef.rolling.ref = rolling
@@ -14,7 +14,7 @@ appender.header_warning.name = header_warning
 appender.deprecation_rolling.type = Console
 appender.deprecation_rolling.name = deprecation_rolling
 appender.deprecation_rolling.layout.type = ECSJsonLayout
-appender.deprecation_rolling.layout.type_name = deprecation
+appender.deprecation_rolling.layout.dataset = elasticsearch.deprecation
 appender.deprecation_rolling.filter.rate_limit.type = RateLimitingFilter
 
 logger.deprecation.name = org.elasticsearch.deprecation
@@ -26,7 +26,7 @@ logger.deprecation.additivity = false
 appender.index_search_slowlog_rolling.type = Console
 appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
 appender.index_search_slowlog_rolling.layout.type = ECSJsonLayout
-appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog
+appender.index_search_slowlog_rolling.layout.dataset = elasticsearch.index_search_slowlog
 
 logger.index_search_slowlog_rolling.name = index.search.slowlog
 logger.index_search_slowlog_rolling.level = trace
@@ -36,7 +36,7 @@ logger.index_search_slowlog_rolling.additivity = false
 appender.index_indexing_slowlog_rolling.type = Console
 appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
 appender.index_indexing_slowlog_rolling.layout.type = ECSJsonLayout
-appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog
+appender.index_indexing_slowlog_rolling.layout.dataset = elasticsearch.index_indexing_slowlog
 
 logger.index_indexing_slowlog.name = index.indexing.slowlog.index
 logger.index_indexing_slowlog.level = trace

+ 5 - 2
distribution/docker/transform-log4j-config/src/test/java/org/elasticsearch/transform/log4j/TransformLog4jConfigTests.java

@@ -101,13 +101,16 @@ public class TransformLog4jConfigTests extends TestCase {
         List<String> input = List.of(
             "appender.rolling.fileName = ${sys:es.logs.base_path}/${sys:es.logs.cluster_name}_server.json",
             "appender.rolling.layout.type = ECSJsonLayout",
-            "appender.rolling.layout.type_name = server",
+            "appender.rolling.layout.dataset = elasticsearch.server",
             "appender.rolling.filePattern = ${sys:es.logs.base_path}/${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.json.gz",
             "appender.rolling.policies.type = Policies",
             "appender.rolling.strategy.type = DefaultRolloverStrategy"
         );
 
-        List<String> expected = List.of("appender.rolling.layout.type = ECSJsonLayout", "appender.rolling.layout.type_name = server");
+        List<String> expected = List.of(
+            "appender.rolling.layout.type = ECSJsonLayout",
+            "appender.rolling.layout.dataset = elasticsearch.server"
+        );
 
         runTest(input, expected);
     }

+ 4 - 4
distribution/src/config/log4j2.properties

@@ -10,7 +10,7 @@ appender.rolling.type = RollingFile
 appender.rolling.name = rolling
 appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_server.json
 appender.rolling.layout.type = ECSJsonLayout
-appender.rolling.layout.type_name = server
+appender.rolling.layout.dataset = elasticsearch.server
 
 appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.json.gz
 appender.rolling.policies.type = Policies
@@ -62,7 +62,7 @@ appender.deprecation_rolling.type = RollingFile
 appender.deprecation_rolling.name = deprecation_rolling
 appender.deprecation_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.json
 appender.deprecation_rolling.layout.type = ECSJsonLayout
-appender.deprecation_rolling.layout.type_name = deprecation
+appender.deprecation_rolling.layout.dataset = elasticsearch.deprecation
 appender.deprecation_rolling.filter.rate_limit.type = RateLimitingFilter
 
 appender.deprecation_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation-%i.json.gz
@@ -88,7 +88,7 @@ appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
 appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\
   .cluster_name}_index_search_slowlog.json
 appender.index_search_slowlog_rolling.layout.type = ECSJsonLayout
-appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog
+appender.index_search_slowlog_rolling.layout.dataset = elasticsearch.index_search_slowlog
 
 appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\
   .cluster_name}_index_search_slowlog-%i.json.gz
@@ -111,7 +111,7 @@ appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
 appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
   _index_indexing_slowlog.json
 appender.index_indexing_slowlog_rolling.layout.type = ECSJsonLayout
-appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog
+appender.index_indexing_slowlog_rolling.layout.dataset = elasticsearch.index_indexing_slowlog
 
 
 appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\

+ 8 - 8
docs/reference/setup/logging-config.asciidoc

@@ -39,8 +39,8 @@ will resolve to `/var/log/elasticsearch/production.log`.
 appender.rolling.type = RollingFile <1>
 appender.rolling.name = rolling
 appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_server.json <2>
-appender.rolling.layout.type = ESJsonLayout <3>
-appender.rolling.layout.type_name = server <4>
+appender.rolling.layout.type = ECSJsonLayout <3>
+appender.rolling.layout.dataset = elasticsearch.server <4>
 appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.json.gz <5>
 appender.rolling.policies.type = Policies
 appender.rolling.policies.time.type = TimeBasedTriggeringPolicy <6>
@@ -62,7 +62,7 @@ appender.rolling.strategy.action.condition.nested_condition.exceeds = 2GB <15>
 <1> Configure the `RollingFile` appender
 <2> Log to `/var/log/elasticsearch/production_server.json`
 <3> Use JSON layout.
-<4> `type_name` is a flag populating the `type` field in a `ESJsonLayout`.
+<4> `dataset` is a flag populating the `event.dataset` field in a `ECSJsonLayout`.
  It can be used to distinguish different types of logs more easily when parsing them.
 <5> Roll logs to `/var/log/elasticsearch/production-yyyy-MM-dd-i.json`; logs
     will be compressed on each roll and `i` will be incremented
@@ -245,17 +245,17 @@ The user ID is included in the `X-Opaque-ID` field in deprecation JSON logs.
 === JSON log format
 
 To make parsing Elasticsearch logs easier, logs are now printed in a JSON format.
-This is configured by a Log4J layout property `appender.rolling.layout.type = ESJsonLayout`.
-This layout requires a `type_name` attribute to be set which is used to distinguish
+This is configured by a Log4J layout property `appender.rolling.layout.type = ECSJsonLayout`.
+This layout requires a `dataset` attribute to be set which is used to distinguish
 logs streams when parsing.
 [source,properties]
 --------------------------------------------------
-appender.rolling.layout.type = ESJsonLayout
-appender.rolling.layout.type_name = server
+appender.rolling.layout.type = ECSJsonLayout
+appender.rolling.layout.dataset = elasticsearch.server
 --------------------------------------------------
 :es-json-layout-java-doc: {elasticsearch-javadoc}/org/elasticsearch/common/logging/ESJsonLayout.html
 
-Each line contains a single JSON document with the properties configured in `ESJsonLayout`.
+Each line contains a single JSON document with the properties configured in `ECSJsonLayout`.
 See this class {es-json-layout-java-doc}[javadoc] for more details.
 However if a JSON document contains an exception, it will be printed over multiple lines.
 The first line will contain regular properties and subsequent lines will contain the
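
To make the updated docs concrete, here is roughly what one server log line looks like with the new field names. This is a sketch only: the timestamp, logger and message values are made up, and the real layout likely emits additional fields (node and cluster ids, `ecs.version`, and so on).

```java
// Hypothetical single line from <cluster_name>_server.json under ECSJsonLayout.
// Field names are taken from this commit; values are illustrative.
public class SampleServerLogLine {
    public static void main(String[] args) {
        String line = "{\"@timestamp\":\"2021-01-12T12:00:00.123Z\","
            + "\"log.level\":\"INFO\","
            + "\"event.dataset\":\"elasticsearch.server\","
            + "\"log.logger\":\"org.elasticsearch.node.Node\","
            + "\"elasticsearch.cluster.name\":\"elasticsearch\","
            + "\"elasticsearch.node.name\":\"sample-name\","
            + "\"message\":\"started\"}";
        System.out.println(line);
    }
}
```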

+ 42 - 45
qa/logging-config/src/test/java/org/elasticsearch/common/logging/JsonLoggerTests.java

@@ -100,21 +100,19 @@ public class JsonLoggerTests extends ESTestCase {
 
             assertThat(jsonLogs, contains(
                 allOf(
-                    hasEntry("type", "deprecation"),
+                    hasEntry("event.dataset", "elasticsearch.deprecation"),
                     hasEntry("log.level", "DEPRECATION"),
                     hasEntry("log.logger", "deprecation.test"),
-                    hasEntry("cluster.name", "elasticsearch"),
-                    hasEntry("node.name", "sample-name"),
+                    hasEntry("elasticsearch.cluster.name", "elasticsearch"),
+                    hasEntry("elasticsearch.node.name", "sample-name"),
                     hasEntry("message", "deprecated message1"),
                     hasEntry("data_stream.type", "logs"),
-                    hasEntry("data_stream.dataset", "deprecation.elasticsearch"),
-                    hasEntry("data_stream.namespace", "default"),
+                    hasEntry("data_stream.dataset", "elasticsearch.deprecation"),
                     hasEntry("ecs.version", DeprecatedMessage.ECS_VERSION),
-                    hasEntry("key", "a key"),
-                    not(hasKey("x-opaque-id")),
+                    hasEntry(DeprecatedMessage.KEY_FIELD_NAME, "a key"),
+                    not(hasKey(DeprecatedMessage.X_OPAQUE_ID_FIELD_NAME)),
                     hasEntry("elasticsearch.event.category", "other")
-                )
-                )
+                ))
             );
         }
 
@@ -139,18 +137,17 @@ public class JsonLoggerTests extends ESTestCase {
                     jsonLogs,
                     contains(
                         allOf(
-                            hasEntry("type", "deprecation"),
+                            hasEntry("event.dataset", "elasticsearch.deprecation"),
                             hasEntry("log.level", "DEPRECATION"),
                             hasEntry("log.logger", "deprecation.test"),
-                            hasEntry("cluster.name", "elasticsearch"),
-                            hasEntry("node.name", "sample-name"),
+                            hasEntry("elasticsearch.cluster.name", "elasticsearch"),
+                            hasEntry("elasticsearch.node.name", "sample-name"),
                             hasEntry("message", "deprecated message1"),
                             hasEntry("data_stream.type", "logs"),
-                            hasEntry("data_stream.dataset", "deprecation.elasticsearch"),
-                            hasEntry("data_stream.namespace", "default"),
+                            hasEntry("data_stream.dataset", "elasticsearch.deprecation"),
                             hasEntry("ecs.version", DeprecatedMessage.ECS_VERSION),
-                            hasEntry("key", "someKey"),
-                            hasEntry("x-opaque-id", "someId"),
+                            hasEntry(DeprecatedMessage.KEY_FIELD_NAME, "someKey"),
+                            hasEntry(DeprecatedMessage.X_OPAQUE_ID_FIELD_NAME, "someId"),
                             hasEntry("elasticsearch.event.category", "other")
                         )
                     )
@@ -177,11 +174,11 @@ public class JsonLoggerTests extends ESTestCase {
 
             assertThat(jsonLogs, contains(
                 allOf(
-                    hasEntry("type", "file"),
+                    hasEntry("event.dataset", "elasticsearch.file"),
                     hasEntry("log.level", "INFO"),
                     hasEntry("log.logger", "test"),
-                    hasEntry("cluster.name", "elasticsearch"),
-                    hasEntry("node.name", "sample-name"),
+                    hasEntry("elasticsearch.cluster.name", "elasticsearch"),
+                    hasEntry("elasticsearch.node.name", "sample-name"),
                     hasEntry("message", "some message value0 value1"),
                     hasEntry("key1", "value1"),
                     hasEntry("key2", "value2"))
@@ -205,11 +202,11 @@ public class JsonLoggerTests extends ESTestCase {
 
             assertThat(jsonLogs, contains(
                 allOf(
-                    hasEntry("type", "file"),
+                    hasEntry("event.dataset", "elasticsearch.file"),
                     hasEntry("log.level", "INFO"),
                     hasEntry("log.logger", "test"),
-                    hasEntry("cluster.name", "elasticsearch"),
-                    hasEntry("node.name", "sample-name"),
+                    hasEntry("elasticsearch.cluster.name", "elasticsearch"),
+                    hasEntry("elasticsearch.node.name", "sample-name"),
                     hasEntry("field1", "value1"),
                     hasEntry("field2", "value2"),
                     hasEntry("message", "some message"))
@@ -231,11 +228,11 @@ public class JsonLoggerTests extends ESTestCase {
             List<JsonLogLine> jsonLogs = collectLines(stream);
 
             assertThat(jsonLogs, contains(
-                logLine("file", Level.ERROR, "sample-name", "test", "This is an error message"),
-                logLine("file", Level.WARN, "sample-name", "test", "This is a warning message"),
-                logLine("file", Level.INFO, "sample-name", "test", "This is an info message"),
-                logLine("file", Level.DEBUG, "sample-name", "test", "This is a debug message"),
-                logLine("file", Level.TRACE, "sample-name", "test", "This is a trace message")
+                logLine("elasticsearch.file", Level.ERROR, "sample-name", "test", "This is an error message"),
+                logLine("elasticsearch.file", Level.WARN, "sample-name", "test", "This is a warning message"),
+                logLine("elasticsearch.file", Level.INFO, "sample-name", "test", "This is an info message"),
+                logLine("elasticsearch.file", Level.DEBUG, "sample-name", "test", "This is a debug message"),
+                logLine("elasticsearch.file", Level.TRACE, "sample-name", "test", "This is a trace message")
             ));
         }
     }
@@ -251,9 +248,9 @@ public class JsonLoggerTests extends ESTestCase {
         try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) {
             List<JsonLogLine> jsonLogs = collectLines(stream);
             assertThat(jsonLogs, contains(
-                logLine("file", Level.INFO, "sample-name", "prefix.shardIdLogger",
+                logLine("elasticsearch.file", Level.INFO, "sample-name", "prefix.shardIdLogger",
                     "This is an info message with a shardId", Map.of(JsonLogLine::getTags, List.of("[indexName][123]"))),
-                logLine("file", Level.INFO, "sample-name", "prefix.prefixLogger",
+                logLine("elasticsearch.file", Level.INFO, "sample-name", "prefix.prefixLogger",
                     "This is an info message with a prefix", Map.of(JsonLogLine::getTags, List.of("PREFIX")))
             ));
         }
@@ -278,7 +275,7 @@ public class JsonLoggerTests extends ESTestCase {
         try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) {
             List<JsonLogLine> jsonLogs = collectLines(stream);
             assertThat(jsonLogs, contains(
-                logLine("file", Level.INFO, "sample-name", "test", json)
+                logLine("elasticsearch.file", Level.INFO, "sample-name", "test", json)
             ));
         }
     }
@@ -292,7 +289,7 @@ public class JsonLoggerTests extends ESTestCase {
             List<JsonLogLine> jsonLogs = collectLines(stream);
             assertThat(jsonLogs, contains(
                 allOf(
-                    logLine("file", Level.ERROR, "sample-name", "test", "error message"),
+                    logLine("elasticsearch.file", Level.ERROR, "sample-name", "test", "error message"),
                     stacktraceMatches("java.lang.Exception: exception message.*Caused by: java.lang.RuntimeException: cause message.*")
                 )
             ));
@@ -322,7 +319,7 @@ public class JsonLoggerTests extends ESTestCase {
             assertThat(jsonLogs, contains(
                 allOf(
                     //message field will have a single line with json escaped
-                    logLine("file", Level.ERROR, "sample-name", "test", "error message " + json),
+                    logLine("elasticsearch.file", Level.ERROR, "sample-name", "test", "error message " + json),
 
                     //stacktrace message will be single line
                     stacktraceWith("java.lang.Exception: " + json)
@@ -350,13 +347,13 @@ public class JsonLoggerTests extends ESTestCase {
 
                 assertThat(jsonLogs, contains(
                     allOf(
-                        hasEntry("type", "deprecation"),
+                        hasEntry("event.dataset", "elasticsearch.deprecation"),
                         hasEntry("log.level", "DEPRECATION"),
                         hasEntry("log.logger", "deprecation.test"),
-                        hasEntry("cluster.name", "elasticsearch"),
-                        hasEntry("node.name", "sample-name"),
+                        hasEntry("elasticsearch.cluster.name", "elasticsearch"),
+                        hasEntry("elasticsearch.node.name", "sample-name"),
                         hasEntry("message", "message1"),
-                        hasEntry("x-opaque-id", "ID1"),
+                        hasEntry(DeprecatedMessage.X_OPAQUE_ID_FIELD_NAME, "ID1"),
                         hasEntry("elasticsearch.event.category", "other"))
                     )
                 );
@@ -382,23 +379,23 @@ public class JsonLoggerTests extends ESTestCase {
                     jsonLogs,
                     contains(
                         allOf(
-                            hasEntry("type", "deprecation"),
+                            hasEntry("event.dataset", "elasticsearch.deprecation"),
                             hasEntry("log.level", "DEPRECATION"),
                             hasEntry("log.logger", "deprecation.test"),
-                            hasEntry("cluster.name", "elasticsearch"),
-                            hasEntry("node.name", "sample-name"),
+                            hasEntry("elasticsearch.cluster.name", "elasticsearch"),
+                            hasEntry("elasticsearch.node.name", "sample-name"),
                             hasEntry("message", "message1"),
-                            hasEntry("x-opaque-id", "ID1"),
+                            hasEntry(DeprecatedMessage.X_OPAQUE_ID_FIELD_NAME, "ID1"),
                             hasEntry("elasticsearch.event.category", "other")
                         ),
                         allOf(
-                            hasEntry("type", "deprecation"),
+                            hasEntry("event.dataset", "elasticsearch.deprecation"),
                             hasEntry("log.level", "DEPRECATION"),
                             hasEntry("log.logger", "deprecation.test"),
-                            hasEntry("cluster.name", "elasticsearch"),
-                            hasEntry("node.name", "sample-name"),
+                            hasEntry("elasticsearch.cluster.name", "elasticsearch"),
+                            hasEntry("elasticsearch.node.name", "sample-name"),
                             hasEntry("message", "message1"),
-                            hasEntry("x-opaque-id", "ID2"),
+                            hasEntry(DeprecatedMessage.X_OPAQUE_ID_FIELD_NAME, "ID2"),
                             hasEntry("elasticsearch.event.category", "other")
                         )
                     )
@@ -439,7 +436,7 @@ public class JsonLoggerTests extends ESTestCase {
 
     private Map<Function<JsonLogLine, Object>, Object> mapOfParamsToCheck(
         String type, Level level, String nodeName, String component, String message) {
-        return Map.of(JsonLogLine::getType, type,
+        return Map.of(JsonLogLine::getDataset, type,
             JsonLogLine::getLevel, level.toString(),
             JsonLogLine::getNodeName, nodeName,
             JsonLogLine::getComponent, component,

+ 3 - 0
qa/logging-config/src/test/java/org/elasticsearch/qa/custom_logging/ESJsonLogsConfigIT.java

@@ -46,8 +46,11 @@ public class ESJsonLogsConfigIT extends JsonLogsIntegTestCase {
     protected BufferedReader openReader(Path logFile) {
         assumeFalse("Skipping test because it is being run against an external cluster.",
             logFile.getFileName().toString().equals("--external--"));
+
         return AccessController.doPrivileged((PrivilegedAction<BufferedReader>) () -> {
             try {
+                String temp = Files.readString(logFile);
+
                 return Files.newBufferedReader(logFile, StandardCharsets.UTF_8);
             } catch (IOException e) {
                 throw new RuntimeException(e);

+ 4 - 4
qa/logging-config/src/test/resources/org/elasticsearch/common/logging/json_layout/log4j2.properties

@@ -1,26 +1,26 @@
 appender.console.type = Console
 appender.console.name = console
 appender.console.layout.type = ECSJsonLayout
-appender.console.layout.type_name = console
+appender.console.layout.dataset = elasticsearch.console
 
 
 appender.file.type = File
 appender.file.name = file
 appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.json
 appender.file.layout.type = ECSJsonLayout
-appender.file.layout.type_name = file
+appender.file.layout.dataset = elasticsearch.file
 
 appender.deprecated.type = File
 appender.deprecated.name = deprecated
 appender.deprecated.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecated.json
 appender.deprecated.layout.type = ECSJsonLayout
-appender.deprecated.layout.type_name = deprecation
+appender.deprecated.layout.dataset = elasticsearch.deprecation
 appender.deprecated.filter.rate_limit.type = RateLimitingFilter
 
 appender.deprecatedconsole.type = Console
 appender.deprecatedconsole.name = deprecatedconsole
 appender.deprecatedconsole.layout.type = ECSJsonLayout
-appender.deprecatedconsole.layout.type_name = deprecation
+appender.deprecatedconsole.layout.dataset = elasticsearch.deprecation
 appender.deprecatedconsole.filter.rate_limit.type = RateLimitingFilter
 
 

+ 8 - 6
server/src/main/java/org/elasticsearch/common/logging/DeprecatedMessage.java

@@ -25,21 +25,23 @@ import org.elasticsearch.common.SuppressLoggerChecks;
 import java.util.Locale;
 
 /**
- * A logger message used by {@link DeprecationLogger}.
- * Carries x-opaque-id field if provided in the headers. Will populate the x-opaque-id field in JSON logs.
+ * A logger message used by {@link DeprecationLogger}, enriched with fields
+ * named following ECS conventions. Carries x-opaque-id field if provided in the headers.
+ * Will populate the x-opaque-id field in JSON logs.
  */
 public class DeprecatedMessage  {
-    public static final String X_OPAQUE_ID_FIELD_NAME = "x-opaque-id";
-    public static final String ECS_VERSION = "1.6";
+    public static final String KEY_FIELD_NAME = "event.code";
+    public static final String X_OPAQUE_ID_FIELD_NAME = "elasticsearch.http.request.x_opaque_id";
+    public static final String ECS_VERSION = "1.7";
 
     @SuppressLoggerChecks(reason = "safely delegates to logger")
     public static ESLogMessage of(DeprecationCategory category, String key, String xOpaqueId, String messagePattern, Object... args) {
         ESLogMessage esLogMessage = new ESLogMessage(messagePattern, args)
             .field("data_stream.type", "logs")
-            .field("data_stream.dataset", "deprecation.elasticsearch")
+            .field("data_stream.dataset", "elasticsearch.deprecation")
             .field("data_stream.namespace", "default")
             .field("ecs.version", ECS_VERSION)
-            .field("key", key)
+            .field(KEY_FIELD_NAME, key)
             .field("elasticsearch.event.category", category.name().toLowerCase(Locale.ROOT));
 
         if (Strings.isNullOrEmpty(xOpaqueId)) {
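
A hedged sketch of how these constants surface in a deprecation log event. It mirrors the builder chain inside `of(...)` above with illustrative values; it is not the method itself, and in real code the category string comes from the `DeprecationCategory` argument.

```java
import org.elasticsearch.common.logging.DeprecatedMessage;
import org.elasticsearch.common.logging.ESLogMessage;

// Illustration of the ECS-aligned field names now attached to a deprecation
// message; values mimic the ones used in JsonLoggerTests/DeprecationHttpIT.
public class DeprecationFieldsSketch {
    static ESLogMessage sample() {
        return new ESLogMessage("[{}] usage is deprecated", "deprecated_settings")
            .field("data_stream.type", "logs")
            .field("data_stream.dataset", "elasticsearch.deprecation")
            .field("data_stream.namespace", "default")
            .field("ecs.version", DeprecatedMessage.ECS_VERSION)              // "1.7"
            .field(DeprecatedMessage.KEY_FIELD_NAME, "deprecated_settings")   // "event.code"
            .field(DeprecatedMessage.X_OPAQUE_ID_FIELD_NAME, "some xid")      // "elasticsearch.http.request.x_opaque_id"
            .field("elasticsearch.event.category", "settings");               // category name, lower-cased
    }
}
```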

+ 13 - 14
server/src/main/java/org/elasticsearch/common/logging/ECSJsonLayout.java

@@ -35,7 +35,7 @@ import java.nio.charset.StandardCharsets;
  * in order to avoid a duplication of configuration in log4j2.properties
  */
 @Plugin(name = "ECSJsonLayout", category = Node.CATEGORY, elementType = Layout.ELEMENT_TYPE, printObject = true)
-public class ECSJsonLayout  {
+public class ECSJsonLayout {
 
     @PluginBuilderFactory
     public static ECSJsonLayout.Builder newBuilder() {
@@ -45,8 +45,8 @@ public class ECSJsonLayout  {
     public static class Builder extends AbstractStringLayout.Builder<Builder>
         implements org.apache.logging.log4j.core.util.Builder<EcsLayout> {
 
-        @PluginAttribute("type_name")
-        String type;
+        @PluginAttribute("dataset")
+        String dataset;
 
         public Builder() {
             setCharset(StandardCharsets.UTF_8);
@@ -64,21 +64,20 @@ public class ECSJsonLayout  {
         }
 
         private KeyValuePair[] additionalFields() {
-            return new KeyValuePair[]{
-                new KeyValuePair("type",type),
-                new KeyValuePair("cluster.uuid","%cluster_id"),
-                new KeyValuePair("node.id","%node_id"),
-                new KeyValuePair("node.name","%ESnode_name"),
-                new KeyValuePair("cluster.name","${sys:es.logs.cluster_name}"),
-            };
+            return new KeyValuePair[] {
+                new KeyValuePair("event.dataset", dataset),
+                new KeyValuePair("elasticsearch.cluster.uuid", "%cluster_id"),
+                new KeyValuePair("elasticsearch.node.id", "%node_id"),
+                new KeyValuePair("elasticsearch.node.name", "%ESnode_name"),
+                new KeyValuePair("elasticsearch.cluster.name", "${sys:es.logs.cluster_name}"), };
         }
 
-        public String getType() {
-            return type;
+        public String getDataset() {
+            return dataset;
         }
 
-        public Builder setType(final String type) {
-            this.type = type;
+        public Builder setDataset(final String dataset) {
+            this.dataset = dataset;
             return asBuilder();
         }
     }
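
The rename also reaches programmatic users of the builder; a minimal sketch, mirroring the `DeprecationIndexingComponent` change at the end of this commit (the `EcsLayout` import path is an assumption — the hunks only show the simple name):

```java
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.elasticsearch.common.logging.ECSJsonLayout;

import co.elastic.logging.log4j2.EcsLayout; // assumed package; not shown in this diff

// setType("deprecation") becomes setDataset("elasticsearch.deprecation").
public class EcsLayoutBuilderSketch {
    static EcsLayout deprecationLayout() {
        LoggerContext context = (LoggerContext) LogManager.getContext(false);
        Configuration configuration = context.getConfiguration();
        return ECSJsonLayout.newBuilder()
            .setDataset("elasticsearch.deprecation")
            .setConfiguration(configuration)
            .build();
    }
}
```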

+ 2 - 1
server/src/main/java/org/elasticsearch/common/logging/RateLimitingFilter.java

@@ -36,6 +36,7 @@ import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 
+import static org.elasticsearch.common.logging.DeprecatedMessage.KEY_FIELD_NAME;
 import static org.elasticsearch.common.logging.DeprecatedMessage.X_OPAQUE_ID_FIELD_NAME;
 
 @Plugin(name = "RateLimitingFilter", category = Node.CATEGORY, elementType = Filter.ELEMENT_TYPE)
@@ -68,7 +69,7 @@ public class RateLimitingFilter extends AbstractFilter {
             final ESLogMessage esLogMessage = (ESLogMessage) message;
 
             String xOpaqueId = esLogMessage.get(X_OPAQUE_ID_FIELD_NAME);
-            final String key = esLogMessage.get("key");
+            final String key = esLogMessage.get(KEY_FIELD_NAME);
 
             return lruKeyCache.add(xOpaqueId + key) ? Result.ACCEPT : Result.DENY;
 

+ 12 - 12
test/framework/src/main/java/org/elasticsearch/common/logging/JsonLogLine.java

@@ -34,7 +34,7 @@ public class JsonLogLine {
     public static final ObjectParser<JsonLogLine, Void> ES_LOG_LINE = createESParser(true);
 
 
-    private String type;
+    private String dataset;
     private String timestamp;
     private String level;
     private String component;
@@ -49,7 +49,7 @@ public class JsonLogLine {
     @Override
     public String toString() {
         final StringBuilder sb = new StringBuilder("JsonLogLine{");
-        sb.append("type='").append(type).append('\'');
+        sb.append("dataset='").append(dataset).append('\'');
         sb.append(", timestamp='").append(timestamp).append('\'');
         sb.append(", level='").append(level).append('\'');
         sb.append(", component='").append(component).append('\'');
@@ -64,8 +64,8 @@ public class JsonLogLine {
         return sb.toString();
     }
 
-    public String getType() {
-        return type;
+    public String getDataset() {
+        return dataset;
     }
 
     public String getTimestamp() {
@@ -108,8 +108,8 @@ public class JsonLogLine {
         return stacktrace;
     }
 
-    public void setType(String type) {
-        this.type = type;
+    public void setDataset(String dataset) {
+        this.dataset = dataset;
     }
 
     public void setTimestamp(String timestamp) {
@@ -154,14 +154,14 @@ public class JsonLogLine {
 
     private static ObjectParser<JsonLogLine, Void> createECSParser(boolean ignoreUnknownFields) {
         ObjectParser<JsonLogLine, Void> parser = new ObjectParser<>("json_log_line", ignoreUnknownFields, JsonLogLine::new);
-        parser.declareString(JsonLogLine::setType, new ParseField("type"));
+        parser.declareString(JsonLogLine::setDataset, new ParseField("event.dataset"));
         parser.declareString(JsonLogLine::setTimestamp, new ParseField("@timestamp"));
         parser.declareString(JsonLogLine::setLevel, new ParseField("log.level"));
         parser.declareString(JsonLogLine::setComponent, new ParseField("log.logger"));
-        parser.declareString(JsonLogLine::setClusterName, new ParseField("cluster.name"));
-        parser.declareString(JsonLogLine::setNodeName, new ParseField("node.name"));
-        parser.declareString(JsonLogLine::setClusterUuid, new ParseField("cluster.uuid"));
-        parser.declareString(JsonLogLine::setNodeId, new ParseField("node.id"));
+        parser.declareString(JsonLogLine::setClusterName, new ParseField("elasticsearch.cluster.name"));
+        parser.declareString(JsonLogLine::setNodeName, new ParseField("elasticsearch.node.name"));
+        parser.declareString(JsonLogLine::setClusterUuid, new ParseField("elasticsearch.cluster.uuid"));
+        parser.declareString(JsonLogLine::setNodeId, new ParseField("elasticsearch.node.id"));
         parser.declareString(JsonLogLine::setMessage, new ParseField("message"));
         parser.declareStringArray(JsonLogLine::setTags, new ParseField("tags"));
         parser.declareStringArray(JsonLogLine::setStacktrace, new ParseField("error.stack_trace"));
@@ -171,7 +171,7 @@ public class JsonLogLine {
 
     private static ObjectParser<JsonLogLine, Void> createESParser(boolean ignoreUnknownFields) {
         ObjectParser<JsonLogLine, Void> parser = new ObjectParser<>("search_template", ignoreUnknownFields, JsonLogLine::new);
-        parser.declareString(JsonLogLine::setType, new ParseField("type"));
+        parser.declareString(JsonLogLine::setDataset, new ParseField("type"));
         parser.declareString(JsonLogLine::setTimestamp, new ParseField("timestamp"));
         parser.declareString(JsonLogLine::setLevel, new ParseField("level"));
         parser.declareString(JsonLogLine::setComponent, new ParseField("component"));

+ 3 - 3
test/framework/src/main/java/org/elasticsearch/common/logging/JsonLogsIntegTestCase.java

@@ -74,7 +74,7 @@ public abstract class JsonLogsIntegTestCase extends ESRestTestCase {
         try (Stream<JsonLogLine> stream = JsonLogsStream.from(openReader(getLogFile()), getParser() )) {
             stream.limit(LINES_TO_CHECK)
                   .forEach(jsonLogLine -> {
-                      assertThat(jsonLogLine.getType(), is(not(emptyOrNullString())));
+                      assertThat(jsonLogLine.getDataset(), is(not(emptyOrNullString())));
                       assertThat(jsonLogLine.getTimestamp(), is(not(emptyOrNullString())));
                       assertThat(jsonLogLine.getLevel(), is(not(emptyOrNullString())));
                       assertThat(jsonLogLine.getComponent(), is(not(emptyOrNullString())));
@@ -88,14 +88,14 @@ public abstract class JsonLogsIntegTestCase extends ESRestTestCase {
     }
 
     private JsonLogLine findFirstLine() throws IOException {
-        try (Stream<JsonLogLine> stream = JsonLogsStream.from(openReader(getLogFile()))) {
+        try (Stream<JsonLogLine> stream = JsonLogsStream.from(openReader(getLogFile()), getParser())) {
             return stream.findFirst()
                          .orElseThrow(() -> new AssertionError("no logs at all?!"));
         }
     }
 
     public void testNodeIdAndClusterIdConsistentOnceAvailable() throws IOException {
-        try (Stream<JsonLogLine> stream = JsonLogsStream.from(openReader(getLogFile()))) {
+        try (Stream<JsonLogLine> stream = JsonLogsStream.from(openReader(getLogFile()), getParser())) {
             Iterator<JsonLogLine> iterator = stream.iterator();
 
             JsonLogLine firstLine = null;

+ 29 - 25
x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java

@@ -36,6 +36,8 @@ import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
+import static org.elasticsearch.common.logging.DeprecatedMessage.KEY_FIELD_NAME;
+import static org.elasticsearch.common.logging.DeprecatedMessage.X_OPAQUE_ID_FIELD_NAME;
 import static org.elasticsearch.test.hamcrest.RegexMatcher.matches;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.containsString;
@@ -62,16 +64,16 @@ public class DeprecationHttpIT extends ESRestTestCase {
             .startObject("transient")
             .field(
                 TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE1.getKey(),
-                !TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE1.getDefault(Settings.EMPTY)
+                TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE1.getDefault(Settings.EMPTY) == false
             )
             .field(
                 TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE2.getKey(),
-                !TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE2.getDefault(Settings.EMPTY)
+                TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE2.getDefault(Settings.EMPTY) == false
             )
             // There should be no warning for this field
             .field(
                 TestDeprecationHeaderRestAction.TEST_NOT_DEPRECATED_SETTING.getKey(),
-                !TestDeprecationHeaderRestAction.TEST_NOT_DEPRECATED_SETTING.getDefault(Settings.EMPTY)
+                TestDeprecationHeaderRestAction.TEST_NOT_DEPRECATED_SETTING.getDefault(Settings.EMPTY) == false
             )
             .endObject()
             .endObject();
@@ -274,37 +276,39 @@ public class DeprecationHttpIT extends ESRestTestCase {
                     hasItems(
                         allOf(
                             hasKey("@timestamp"),
-                            hasKey("cluster.name"),
-                            hasKey("cluster.uuid"),
-                            hasKey("log.logger"),
-                            hasEntry("data_stream.dataset", "deprecation.elasticsearch"),
+                            hasKey("elasticsearch.cluster.name"),
+                            hasKey("elasticsearch.cluster.uuid"),
+                            hasEntry(X_OPAQUE_ID_FIELD_NAME, "some xid"),
+                            hasEntry("elasticsearch.event.category", "settings"),
+                            hasKey("elasticsearch.node.id"),
+                            hasKey("elasticsearch.node.name"),
+                            hasEntry("data_stream.dataset", "elasticsearch.deprecation"),
                             hasEntry("data_stream.namespace", "default"),
                             hasEntry("data_stream.type", "logs"),
-                            hasEntry("ecs.version", "1.6"),
-                            hasEntry("key", "deprecated_settings"),
+                            hasEntry("ecs.version", "1.7"),
+                            hasEntry(KEY_FIELD_NAME, "deprecated_settings"),
+                            hasEntry("event.dataset", "elasticsearch.deprecation"),
                             hasEntry("log.level", "DEPRECATION"),
-                            hasEntry("message", "[deprecated_settings] usage is deprecated. use [settings] instead"),
-                            hasKey("node.id"),
-                            hasKey("node.name"),
-                            hasEntry("x-opaque-id", "some xid"),
-                            hasEntry("elasticsearch.event.category", "settings")
+                            hasKey("log.logger"),
+                            hasEntry("message", "[deprecated_settings] usage is deprecated. use [settings] instead")
                         ),
                         allOf(
                             hasKey("@timestamp"),
-                            hasKey("cluster.name"),
-                            hasKey("cluster.uuid"),
-                            hasKey("log.logger"),
-                            hasEntry("data_stream.dataset", "deprecation.elasticsearch"),
+                            hasKey("elasticsearch.cluster.name"),
+                            hasKey("elasticsearch.cluster.uuid"),
+                            hasEntry(X_OPAQUE_ID_FIELD_NAME, "some xid"),
+                            hasEntry("elasticsearch.event.category", "api"),
+                            hasKey("elasticsearch.node.id"),
+                            hasKey("elasticsearch.node.name"),
+                            hasEntry("data_stream.dataset", "elasticsearch.deprecation"),
                             hasEntry("data_stream.namespace", "default"),
                             hasEntry("data_stream.type", "logs"),
-                            hasEntry("ecs.version", "1.6"),
-                            hasEntry("key", "deprecated_route"),
+                            hasEntry("ecs.version", "1.7"),
+                            hasEntry(KEY_FIELD_NAME, "deprecated_route"),
+                            hasEntry("event.dataset", "elasticsearch.deprecation"),
                             hasEntry("log.level", "DEPRECATION"),
-                            hasEntry("message", "[/_test_cluster/deprecated_settings] exists for deprecated tests"),
-                            hasKey("node.id"),
-                            hasKey("node.name"),
-                            hasEntry("x-opaque-id", "some xid"),
-                            hasEntry("elasticsearch.event.category", "api")
+                            hasKey("log.logger"),
+                            hasEntry("message", "[/_test_cluster/deprecated_settings] exists for deprecated tests")
                         )
                     )
                 );

+ 4 - 1
x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingComponent.java

@@ -64,7 +64,10 @@ public class DeprecationIndexingComponent extends AbstractLifecycleComponent imp
         final LoggerContext context = (LoggerContext) LogManager.getContext(false);
         final Configuration configuration = context.getConfiguration();
 
-        final EcsLayout ecsLayout = ECSJsonLayout.newBuilder().setType("deprecation").setConfiguration(configuration).build();
+        final EcsLayout ecsLayout = ECSJsonLayout.newBuilder()
+            .setDataset("elasticsearch.deprecation")
+            .setConfiguration(configuration)
+            .build();
 
         this.filter = new RateLimitingFilter();
         this.appender = new DeprecationIndexingAppender("deprecation_indexing_appender", filter, ecsLayout, consumer);