Browse Source

Dates: Be backwards compatible with pre 2.x indices

In order to be backwards compatible, indices created before 2.x must support
indexing of a unix timestamp and its configured date format. Indices created
with 2.x must configure the `epoch_millis` date formatter in order to
support this.

Relates #10971
Alexander Reelsen 10 years ago
parent
commit
23cf9af495

+ 8 - 3
core/src/main/java/org/elasticsearch/action/index/IndexRequest.java

@@ -22,6 +22,7 @@ package org.elasticsearch.action.index;
 import com.google.common.base.Charsets;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.*;
 import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.client.Requests;
@@ -562,8 +563,10 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
         routing(metaData.resolveIndexRouting(routing, index));
         // resolve timestamp if provided externally
         if (timestamp != null) {
+            Version version = Version.indexCreated(metaData.getIndices().get(concreteIndex).settings());
             timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp,
-                    mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER);
+                    mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER,
+                    version);
         }
         // extract values if needed
         if (mappingMd != null) {
@@ -586,7 +589,8 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
                     if (parseContext.shouldParseTimestamp()) {
                         timestamp = parseContext.timestamp();
                         if (timestamp != null) {
-                            timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, mappingMd.timestamp().dateTimeFormatter());
+                            Version version = Version.indexCreated(metaData.getIndices().get(concreteIndex).settings());
+                            timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, mappingMd.timestamp().dateTimeFormatter(), version);
                         }
                     }
                 } catch (MapperParsingException e) {
@@ -638,7 +642,8 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
             if (defaultTimestamp.equals(TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP)) {
                 timestamp = Long.toString(System.currentTimeMillis());
             } else {
-                timestamp = MappingMetaData.Timestamp.parseStringTimestamp(defaultTimestamp, mappingMd.timestamp().dateTimeFormatter());
+                Version version = Version.indexCreated(metaData.getIndices().get(concreteIndex).settings());
+                timestamp = MappingMetaData.Timestamp.parseStringTimestamp(defaultTimestamp, mappingMd.timestamp().dateTimeFormatter(), version);
             }
         }
     }

+ 15 - 2
core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java

@@ -19,6 +19,7 @@
 
 package org.elasticsearch.cluster.metadata;
 
+import org.elasticsearch.Version;
 import org.elasticsearch.action.TimestampParsingException;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.common.Nullable;
@@ -160,10 +161,22 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
 
     public static class Timestamp {
 
-        public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException {
+        private static final FormatDateTimeFormatter EPOCH_MILLIS_PARSER = Joda.forPattern("epoch_millis");
+
+        public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter,
+                                                  Version version) throws TimestampParsingException {
             try {
-                return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString));
+                // no need for unix timestamp parsing in 2.x
+                FormatDateTimeFormatter formatter = version.onOrAfter(Version.V_2_0_0) ? dateTimeFormatter : EPOCH_MILLIS_PARSER;
+                return Long.toString(formatter.parser().parseMillis(timestampAsString));
             } catch (RuntimeException e) {
+                if (version.before(Version.V_2_0_0)) {
+                    try {
+                        return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString));
+                    } catch (RuntimeException e1) {
+                        throw new TimestampParsingException(timestampAsString, e1);
+                    }
+                }
                 throw new TimestampParsingException(timestampAsString, e);
             }
         }

+ 16 - 16
core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java

@@ -30,6 +30,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.ToStringUtils;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Nullable;
@@ -46,18 +47,17 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.NumericDateAnalyzer;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.search.internal.SearchContext;
 import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Objects;
+import java.util.*;
 import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
@@ -70,7 +70,7 @@ public class DateFieldMapper extends NumberFieldMapper {
     public static final String CONTENT_TYPE = "date";
 
     public static class Defaults extends NumberFieldMapper.Defaults {
-        public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime", Locale.ROOT);
+        public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime||epoch_millis", Locale.ROOT);
         public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS;
         public static final DateFieldType FIELD_TYPE = new DateFieldType();
 
@@ -126,6 +126,14 @@ public class DateFieldMapper extends NumberFieldMapper {
 
         protected void setupFieldType(BuilderContext context) {
             FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
+            // TODO MOVE ME OUTSIDE OF THIS SPACE?
+            if (Version.indexCreated(context.indexSettings()).before(Version.V_2_0_0)) {
+                boolean includesEpochFormatter = dateTimeFormatter.format().contains("epoch_");
+                if (!includesEpochFormatter) {
+                    String format = fieldType().timeUnit().equals(TimeUnit.SECONDS) ? "epoch_second" : "epoch_millis";
+                    fieldType().setDateTimeFormatter(Joda.forPattern(format + "||" + dateTimeFormatter.format()));
+                }
+            }
             if (!locale.equals(dateTimeFormatter.locale())) {
                 fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale));
             }
@@ -308,15 +316,7 @@ public class DateFieldMapper extends NumberFieldMapper {
         }
 
         protected long parseStringValue(String value) {
-            try {
-                return dateTimeFormatter().parser().parseMillis(value);
-            } catch (RuntimeException e) {
-                try {
-                    return timeUnit().toMillis(Long.parseLong(value));
-                } catch (NumberFormatException e1) {
-                    throw new MapperParsingException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter().format() + "], and timestamp number with locale [" + dateTimeFormatter().locale() + "]", e);
-                }
-            }
+            return dateTimeFormatter().parser().parseMillis(value);
         }
 
         @Override

+ 3 - 0
core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java

@@ -147,6 +147,9 @@ public class RootObjectMapper extends ObjectMapper {
                 List<FormatDateTimeFormatter> dateTimeFormatters = newArrayList();
                 if (fieldNode instanceof List) {
                     for (Object node1 : (List) fieldNode) {
+                        if (node1.toString().startsWith("epoch_")) {
+                            throw new MapperParsingException("Epoch ["+ node1.toString() +"] is not supported as dynamic date format");
+                        }
                         dateTimeFormatters.add(parseDateTimeFormatter(node1));
                     }
                 } else if ("none".equals(fieldNode.toString())) {

+ 12 - 6
core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java

@@ -29,19 +29,14 @@ import java.io.IOException;
 public class RangeQueryBuilder extends MultiTermQueryBuilder implements BoostableQueryBuilder<RangeQueryBuilder> {
 
     private final String name;
-
     private Object from;
-
     private Object to;
     private String timeZone;
-
     private boolean includeLower = true;
-
     private boolean includeUpper = true;
-
     private float boost = -1;
-
     private String queryName;
+    private String format;
 
     /**
      * A Query that matches documents within an range of terms.
@@ -406,6 +401,14 @@ public class RangeQueryBuilder extends MultiTermQueryBuilder implements Boostabl
         return this;
     }
 
+    /**
+     * In case of date field, we can set the format to be used instead of the mapper format
+     */
+    public RangeQueryBuilder format(String format) {
+        this.format = format;
+        return this;
+    }
+
     @Override
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(RangeQueryParser.NAME);
@@ -415,6 +418,9 @@ public class RangeQueryBuilder extends MultiTermQueryBuilder implements Boostabl
         if (timeZone != null) {
             builder.field("time_zone", timeZone);
         }
+        if (format != null) {
+            builder.field("format", format);
+        }
         builder.field("include_lower", includeLower);
         builder.field("include_upper", includeUpper);
         if (boost != -1) {

+ 2 - 9
core/src/test/java/org/elasticsearch/count/simple/SimpleCountTests.java

@@ -181,7 +181,7 @@ public class SimpleCountTests extends ElasticsearchIntegrationTest {
     }
 
     @Test
-    public void testThatNonEpochDatesCanBeSearch() throws Exception {
+    public void testThatNonEpochDatesCanBeSearched() throws Exception {
         assertAcked(prepareCreate("test")
                 .addMapping("type1",
                     jsonBuilder().startObject().startObject("type1")
@@ -201,16 +201,9 @@ public class SimpleCountTests extends ElasticsearchIntegrationTest {
                 .endObject();
         assertThat(client().prepareIndex("test", "type1").setSource(document).get().isCreated(), is(true));
 
-        // this is a timestamp in 2015 and should not be returned in counting when filtering by year
-        document = jsonBuilder()
-                .startObject()
-                .field("date_field", "1433236702")
-                .endObject();
-        assertThat(client().prepareIndex("test", "type1").setSource(document).get().isCreated(), is(true));
-
         refresh();
 
-        assertHitCount(client().prepareCount("test").get(), 3);
+        assertHitCount(client().prepareCount("test").get(), 2);
 
         CountResponse countResponse = client().prepareCount("test").setQuery(QueryBuilders.rangeQuery("date_field").from("2015010100").to("2015123123")).get();
         assertHitCount(countResponse, 1);

+ 205 - 0
core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java

@@ -0,0 +1,205 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper.date;
+
+import com.google.common.collect.Lists;
+import org.elasticsearch.Version;
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.test.ElasticsearchSingleNodeTest;
+import org.junit.Before;
+
+import java.util.List;
+
+import static org.elasticsearch.common.settings.Settings.settingsBuilder;
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits;
+import static org.hamcrest.Matchers.is;
+
+/**
+ * Test class to check for all the conditions defined in
+ * https://github.com/elastic/elasticsearch/issues/10971
+ */
+public class DateBackwardsCompatibilityTests extends ElasticsearchSingleNodeTest {
+
+    private String index = "testindex";
+    private String type = "testtype";
+    private Version randomVersionBelow2x;
+
+    @Before
+    public void setup() throws Exception {
+        randomVersionBelow2x = randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1);
+    }
+
+    public void testThatPre2xIndicesNumbersAreTreatedAsEpochs() throws Exception {
+        index = createPre2xIndexAndMapping();
+        long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015
+        XContentBuilder document = jsonBuilder().startObject().field("date_field", dateInMillis).endObject();
+        index(document);
+
+        // search for date in time range
+        QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24");
+        SearchResponse response = client().prepareSearch(index).setQuery(query).get();
+        assertHitCount(response, 1);
+    }
+
+    public void testThatPre2xFailedStringParsingLeadsToEpochParsing() throws Exception {
+        index = createPre2xIndexAndMapping();
+        long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015
+        String date = String.valueOf(dateInMillis);
+        XContentBuilder document = jsonBuilder().startObject().field("date_field", date).endObject();
+        index(document);
+
+        // search for date in time range
+        QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24");
+        SearchResponse response = client().prepareSearch(index).setQuery(query).get();
+        assertHitCount(response, 1);
+    }
+
+    public void testThatPre2xSupportsUnixTimestampsInAnyDateFormat() throws Exception {
+        long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015
+        List<String> dateFormats = Lists.newArrayList("dateOptionalTime", "weekDate", "tTime", "ordinalDate", "hourMinuteSecond", "hourMinute");
+
+        for (String format : dateFormats) {
+            XContentBuilder mapping = jsonBuilder().startObject().startObject("properties")
+                    .startObject("date_field").field("type", "date").field("format", format).endObject()
+                    .endObject().endObject();
+
+            index = createIndex(randomVersionBelow2x, mapping);
+
+            XContentBuilder document = XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("date_field", String.valueOf(dateInMillis))
+                    .endObject();
+            index(document);
+
+            // indexing as regular timestamp should work as well
+            document = XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("date_field", dateInMillis)
+                    .endObject();
+            index(document);
+
+            client().admin().indices().prepareDelete(index).get();
+        }
+    }
+
+    public void testThatPre2xIndicesNumbersAreTreatedAsTimestamps() throws Exception {
+        // looks like a unix time stamp but is meant as 2015-06-23T01:00:00.000 - see the specified date format
+        long date = 2015062301000l;
+
+        XContentBuilder mapping = jsonBuilder().startObject().startObject("properties")
+                .startObject("date_field").field("type", "date").field("format","yyyyMMddHHSSS").endObject()
+                .endObject().endObject();
+        index = createIndex(randomVersionBelow2x, mapping);
+
+        XContentBuilder document = XContentFactory.jsonBuilder()
+                .startObject()
+                .field("date_field", randomBoolean() ? String.valueOf(date) : date)
+                .endObject();
+        index(document);
+
+        // no results in expected time range
+        QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24").format("dateOptionalTime");
+        SearchResponse response = client().prepareSearch(index).setQuery(query).get();
+        assertNoSearchHits(response);
+
+        // result in unix timestamp range
+        QueryBuilder timestampQuery = QueryBuilders.rangeQuery("date_field").from(2015062300000L).to(2015062302000L);
+        assertHitCount(client().prepareSearch(index).setQuery(timestampQuery).get(), 1);
+
+        // result should also work with regular specified dates
+        QueryBuilder regularTimeQuery = QueryBuilders.rangeQuery("date_field").from("2033-11-08").to("2033-11-09").format("dateOptionalTime");
+        assertHitCount(client().prepareSearch(index).setQuery(regularTimeQuery).get(), 1);
+    }
+
+    public void testThatPost2xIndicesNumbersAreTreatedAsStrings() throws Exception {
+        // looks like a unix time stamp but is meant as 2015-06-23T01:00:00.000 - see the specified date format
+        long date = 2015062301000l;
+
+        XContentBuilder mapping = jsonBuilder().startObject().startObject("properties")
+                .startObject("date_field").field("type", "date").field("format","yyyyMMddHHSSS").endObject()
+                .endObject().endObject();
+        index = createIndex(Version.CURRENT, mapping);
+
+        XContentBuilder document = XContentFactory.jsonBuilder()
+                .startObject()
+                .field("date_field", String.valueOf(date))
+                .endObject();
+        index(document);
+
+        document = XContentFactory.jsonBuilder()
+                .startObject()
+                .field("date_field", date)
+                .endObject();
+        index(document);
+
+        // search for date in time range
+        QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24").format("dateOptionalTime");
+        SearchResponse response = client().prepareSearch(index).setQuery(query).get();
+        assertHitCount(response, 2);
+    }
+
+    public void testDynamicDateDetectionIn2xDoesNotSupportEpochs() throws Exception {
+        try {
+            XContentBuilder mapping = jsonBuilder().startObject()
+                    .startArray("dynamic_date_formats").value("dateOptionalTime").value("epoch_seconds").endArray()
+                    .endObject();
+            createIndex(Version.CURRENT, mapping);
+            fail("Expected a MapperParsingException, but did not happen");
+        } catch (MapperParsingException e) {
+            assertThat(e.getMessage(), is("mapping [" + type + "]"));
+        }
+    }
+
+    private String createPre2xIndexAndMapping() throws Exception {
+        return createIndexAndMapping(randomVersionBelow2x);
+    }
+
+    private String createIndexAndMapping(Version version) throws Exception {
+        XContentBuilder mapping = jsonBuilder().startObject().startObject("properties")
+                .startObject("date_field").field("type", "date").field("format", "dateOptionalTime").endObject()
+                .endObject().endObject();
+
+        return createIndex(version, mapping);
+    }
+
+    private String createIndex(Version version, XContentBuilder mapping) {
+        Settings settings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
+        createIndex(index, settings, type, mapping);
+
+        ensureGreen(index);
+        return index;
+    }
+
+    private void index(XContentBuilder document) {
+        IndexResponse indexResponse = client().prepareIndex(index, type).setSource(document).setRefresh(true).get();
+        assertThat(indexResponse.isCreated(), is(true));
+    }
+}

+ 34 - 9
core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java

@@ -25,7 +25,9 @@ import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.util.Constants;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.LocaleUtils;
@@ -53,6 +55,7 @@ import java.util.*;
 import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean;
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.index.mapper.string.SimpleStringMappingTests.docValuesType;
+import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
 import static org.hamcrest.Matchers.*;
 
 public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
@@ -147,12 +150,21 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
     int i = 0;
 
     private DocumentMapper mapper(String type, String mapping) throws IOException {
+        return mapper(type, mapping, Version.CURRENT);
+    }
+
+    private DocumentMapper mapper(String type, String mapping, Version version) throws IOException {
         final String indexName = "test-" + (i++);
-        IndexService index = createIndex(indexName);
+        IndexService index;
+        if (version.equals(Version.CURRENT)) {
+            index = createIndex(indexName);
+        } else {
+            index = createIndex(indexName, settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build());
+        }
         client().admin().indices().preparePutMapping(indexName).setType(type).setSource(mapping).get();
         return index.mapperService().documentMapper(type);
     }
-    
+
     private void assertNumericTokensEqual(ParsedDocument doc, DocumentMapper defaultMapper, String fieldA, String fieldB) throws IOException {
         assertThat(doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue());
         assertThat(doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue());
@@ -181,15 +193,15 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject().endObject().string();
 
         DocumentMapper defaultMapper = mapper("type", mapping);
-
         long value = System.currentTimeMillis();
+
         ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                 .startObject()
                 .field("date_field", value)
                 .endObject()
                 .bytes());
 
-        assertThat(doc.rootDoc().getField("date_field").tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue());
+         assertThat(doc.rootDoc().getField("date_field").tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue());
     }
 
     public void testDateDetection() throws Exception {
@@ -290,7 +302,8 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
                     .endObject()
                     .bytes());
         } catch (MapperParsingException e) {
-            assertThat(e.getCause(), instanceOf(MapperParsingException.class));
+            assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
+            assertThat(e.getMessage(), is("failed to parse [field2]"));
         }
 
         // Verify that the default is false
@@ -301,7 +314,8 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
                     .endObject()
                     .bytes());
         } catch (MapperParsingException e) {
-            assertThat(e.getCause(), instanceOf(MapperParsingException.class));
+            assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
+            assertThat(e.getMessage(), is("failed to parse [field3]"));
         }
 
         // Unless the global ignore_malformed option is set to true
@@ -322,7 +336,8 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
                     .endObject()
                     .bytes());
         } catch (MapperParsingException e) {
-            assertThat(e.getCause(), instanceOf(MapperParsingException.class));
+            assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
+            assertThat(e.getMessage(), is("failed to parse [field2]"));
         }
     }
 
@@ -399,12 +414,12 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
         throw new AssertionError("missing");
     }
 
-    public void testNumericResolution() throws Exception {
+    public void testNumericResolutionBackwardsCompat() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("date_field").field("type", "date").field("format", "date_time").field("numeric_resolution", "seconds").endObject().endObject()
                 .endObject().endObject().string();
 
-        DocumentMapper defaultMapper = mapper("type", mapping);
+        DocumentMapper defaultMapper = mapper("type", mapping, Version.V_0_90_0);
 
         // provided as an int
         ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
@@ -429,6 +444,16 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject()
                 .bytes());
         assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(44000L));
+
+        // expected to fail due to field epoch date formatters not being set
+        DocumentMapper currentMapper = mapper("type", mapping);
+        try {
+            currentMapper.parse("type", "2", XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("date_field", randomBoolean() ? "43" : 43)
+                    .endObject()
+                    .bytes());
+        } catch (MapperParsingException e) {}
     }
 
     public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception {

+ 54 - 15
core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java

@@ -38,12 +38,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.Index;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.DocumentMapperParser;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MergeResult;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.SourceToParse;
+import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
 import org.elasticsearch.test.ElasticsearchSingleNodeTest;
 import org.junit.Test;
@@ -57,7 +52,9 @@ import java.util.Map;
 
 import static org.elasticsearch.Version.V_1_5_0;
 import static org.elasticsearch.Version.V_2_0_0;
+import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
+import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
@@ -249,15 +246,15 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
                     .field("foo", "bar")
                 .endObject();
 
-        MetaData metaData = MetaData.builder().build();
         DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string());
+        MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
 
         MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
 
         IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
         request.process(metaData, mappingMetaData, true, "test");
         assertThat(request.timestamp(), notNullValue());
-        assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"))));
+        assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT)));
     }
 
     @Test // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
@@ -274,15 +271,15 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
                     .field("foo", "bar")
                 .endObject();
 
-        MetaData metaData = MetaData.builder().build();
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string());
+        MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
 
         MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
 
         IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
         request.process(metaData, mappingMetaData, true, "test");
         assertThat(request.timestamp(), notNullValue());
-        assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"))));
+        assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT)));
     }
 
     @Test // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
@@ -751,11 +748,12 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
             .startObject("_timestamp").field("enabled", true).field("path", "custom_timestamp").endObject()
             .endObject().endObject().string();
         DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping);
+        MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
 
         XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_timestamp", 1).endObject();
         MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
         IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
-        request.process(MetaData.builder().build(), mappingMetaData, true, "test");
+        request.process(metaData, mappingMetaData, true, "test");
 
         assertEquals(request.timestamp(), "1");
     }
@@ -766,28 +764,69 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
             .endObject().endObject().string();
         Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
         DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
+        MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
 
         XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_timestamp", 2000000).endObject();
         MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
         IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
-        request.process(MetaData.builder().build(), mappingMetaData, true, "test");
+        request.process(metaData, mappingMetaData, true, "test");
 
         // _timestamp in a document never worked, so backcompat is ignoring the field
-        assertEquals(MappingMetaData.Timestamp.parseStringTimestamp("1970", Joda.forPattern("YYYY")), request.timestamp());
+        assertEquals(MappingMetaData.Timestamp.parseStringTimestamp("1970", Joda.forPattern("YYYY"), Version.V_1_4_2), request.timestamp());
         assertNull(docMapper.parse("type", "1", doc.bytes()).rootDoc().get("_timestamp"));
     }
 
     public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject()
+                .startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject()
             .endObject().endObject().string();
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
+        // use the real cluster metadata so request.process() can resolve the index-created version
+        MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
 
         XContentBuilder doc = XContentFactory.jsonBuilder().startObject().endObject();
+        // "2015060210" matches the custom yyyyMMddHH format; it must be parsed with that
+        // format (yielding millis 1433239200000) rather than being misread as an epoch value
         IndexRequest request = new IndexRequest("test", "type", "1").source(doc).timestamp("2015060210");
         MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
-        request.process(MetaData.builder().build(), mappingMetaData, true, "test");
+        request.process(metaData, mappingMetaData, true, "test");
 
         assertThat(request.timestamp(), is("1433239200000"));
     }
+
+    public void testThatIndicesBefore2xMustSupportUnixTimestampsInAnyDateFormat() throws Exception {
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("_timestamp").field("enabled", true).field("format", "dateOptionalTime").endObject()
+                .endObject().endObject().string();
+
+        BytesReference source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes();
+
+        // indices created before 2.x must accept both a date in the configured
+        // format and a plain unix timestamp, for backwards compatibility
+        Settings oldSettings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersionBetween(random(), Version.V_0_90_0, Version.V_1_6_0)).build();
+        DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse(mapping);
+
+        MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
+
+        // both index requests are successfully processed on the pre-2.x index
+        IndexRequest oldIndexDateIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1970-01-01");
+        oldIndexDateIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index");
+        IndexRequest oldIndexTimestampIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1234567890");
+        oldIndexTimestampIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index");
+
+        // an index created with 2.x only accepts the configured date format;
+        // raw epoch values require an explicit epoch_* format in the mapping
+        DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse(mapping);
+        MetaData newMetaData = client().admin().cluster().prepareState().get().getState().getMetaData();
+
+        // a date matching the configured format still works with 2.x
+        IndexRequest request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1970-01-01");
+        request.process(newMetaData, new MappingMetaData(currentMapper), true, "new-index");
+
+        // a raw unix timestamp must be rejected with 2.x
+        request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1234567890");
+        try {
+            request.process(newMetaData, new MappingMetaData(currentMapper), true, "new-index");
+            // without this the test silently passes when no exception is thrown
+            fail("expected timestamp [1234567890] to be rejected on an index created with 2.x");
+        } catch (Exception e) {
+            assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
+            assertThat(e.getMessage(), containsString("failed to parse timestamp [1234567890]"));
+        }
+    }
 }

+ 1 - 1
core/src/test/java/org/elasticsearch/percolator/PercolatorTests.java

@@ -1812,7 +1812,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
     @Test
     public void testPercolatorQueryWithNowRange() throws Exception {
         client().admin().indices().prepareCreate("test")
-                .addMapping("my-type", "timestamp", "type=date")
+                .addMapping("my-type", "timestamp", "type=date,format=epoch_millis")
                 .get();
         ensureGreen();
 

+ 1 - 1
core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreTests.java

@@ -449,7 +449,7 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
         assertAcked(prepareCreate("test").addMapping(
                 "type1",
                 jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
-                        .endObject().startObject("num1").field("type", "date").endObject().endObject().endObject().endObject()));
+                        .endObject().startObject("num1").field("type", "date").field("format", "epoch_millis").endObject().endObject().endObject().endObject()));
         ensureYellow();
         client().index(
                 indexRequest("test").type("type1").id("1")

+ 0 - 3
docs/reference/mapping/types/core-types.asciidoc

@@ -378,9 +378,6 @@ defaults to `true` or to the parent `object` type setting.
 
 |`ignore_malformed` |Ignores a malformed number. Defaults to `false`.
 
-|`numeric_resolution` |The unit to use when passed in a numeric values. Possible
-values include `seconds` and  `milliseconds` (default).
-
 |=======================================================================
 
 [float]

+ 3 - 0
docs/reference/migration/migrate_2_0.asciidoc

@@ -312,6 +312,9 @@ date.
 This is not supported anymore. If you want to store unix timestamps, you need to specify
 the appropriate formats in the mapping, namely `epoch_second` or `epoch_millis`.
 
+In addition, the `numeric_resolution` mapping parameter is now ignored. Use the
+`epoch_second` and `epoch_millis` date formats instead.
+
 ==== Source field limitations
 The `_source` field could previously be disabled dynamically. Since this field
 is a critical piece of many features like the Update API, it is no longer