@@ -241,7 +241,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testSegments() throws Exception {
-        List<Segment> segments = engine.segments();
+        List<Segment> segments = engine.segments(false);
         assertThat(segments.isEmpty(), equalTo(true));
         assertThat(engine.segmentsStats().getCount(), equalTo(0l));
         assertThat(engine.segmentsStats().getMemoryInBytes(), equalTo(0l));
@@ -255,7 +255,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         engine.create(new Engine.Create(null, newUid("2"), doc2));
         engine.refresh("test", false);
 
-        segments = engine.segments();
+        segments = engine.segments(false);
         assertThat(segments.size(), equalTo(1));
         SegmentsStats stats = engine.segmentsStats();
         assertThat(stats.getCount(), equalTo(1l));
@@ -269,10 +269,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         assertThat(segments.get(0).getNumDocs(), equalTo(2));
         assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
         assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
+        assertThat(segments.get(0).ramTree, nullValue());
 
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
 
-        segments = engine.segments();
+        segments = engine.segments(false);
         assertThat(segments.size(), equalTo(1));
         assertThat(engine.segmentsStats().getCount(), equalTo(1l));
         assertThat(segments.get(0).isCommitted(), equalTo(true));
@@ -287,7 +288,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         engine.create(new Engine.Create(null, newUid("3"), doc3));
         engine.refresh("test", false);
 
-        segments = engine.segments();
+        segments = engine.segments(false);
         assertThat(segments.size(), equalTo(2));
         assertThat(engine.segmentsStats().getCount(), equalTo(2l));
         assertThat(engine.segmentsStats().getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes()));
@@ -313,7 +314,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         engine.delete(new Engine.Delete("test", "1", newUid("1")));
         engine.refresh("test", false);
 
-        segments = engine.segments();
+        segments = engine.segments(false);
         assertThat(segments.size(), equalTo(2));
         assertThat(engine.segmentsStats().getCount(), equalTo(2l));
         assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
@@ -334,7 +335,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         engine.create(new Engine.Create(null, newUid("4"), doc4));
         engine.refresh("test", false);
 
-        segments = engine.segments();
+        segments = engine.segments(false);
         assertThat(segments.size(), equalTo(3));
         assertThat(engine.segmentsStats().getCount(), equalTo(3l));
         assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
@@ -356,6 +357,33 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         assertThat(segments.get(2).getDeletedDocs(), equalTo(0));
         assertThat(segments.get(2).isCompound(), equalTo(true));
     }
+
+    public void testVerboseSegments() throws Exception {
+        List<Segment> segments = engine.segments(true);
+        assertThat(segments.isEmpty(), equalTo(true));
+
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
+        engine.create(new Engine.Create(null, newUid("1"), doc));
+        engine.refresh("test", false);
+
+        segments = engine.segments(true);
+        assertThat(segments.size(), equalTo(1));
+        assertThat(segments.get(0).ramTree, notNullValue());
+
+        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_2, false);
+        engine.create(new Engine.Create(null, newUid("2"), doc2));
+        engine.refresh("test", false);
+        ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_3, false);
+        engine.create(new Engine.Create(null, newUid("3"), doc3));
+        engine.refresh("test", false);
+
+        segments = engine.segments(true);
+        assertThat(segments.size(), equalTo(3));
+        assertThat(segments.get(0).ramTree, notNullValue());
+        assertThat(segments.get(1).ramTree, notNullValue());
+        assertThat(segments.get(2).ramTree, notNullValue());
+
+    }
 
     public void testStartAndAcquireConcurrently() throws IOException {
         ConcurrentMergeSchedulerProvider mergeSchedulerProvider = new ConcurrentMergeSchedulerProvider(shardId, EMPTY_SETTINGS, threadPool, new IndexSettingsService(shardId.index(), EMPTY_SETTINGS));
@@ -419,19 +447,21 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         Engine.Index index = new Engine.Index(null, newUid("1"), doc);
         engine.index(index);
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-        assertThat(engine.segments().size(), equalTo(1));
+        assertThat(engine.segments(false).size(), equalTo(1));
         index = new Engine.Index(null, newUid("2"), doc);
         engine.index(index);
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-        assertThat(engine.segments().size(), equalTo(2));
-        for (Segment segment : engine.segments()) {
+        List<Segment> segments = engine.segments(false);
+        assertThat(segments.size(), equalTo(2));
+        for (Segment segment : segments) {
             assertThat(segment.getMergeId(), nullValue());
         }
         index = new Engine.Index(null, newUid("3"), doc);
         engine.index(index);
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-        assertThat(engine.segments().size(), equalTo(3));
-        for (Segment segment : engine.segments()) {
+        segments = engine.segments(false);
+        assertThat(segments.size(), equalTo(3));
+        for (Segment segment : segments) {
             assertThat(segment.getMergeId(), nullValue());
         }
 
@@ -440,7 +470,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         engine.forceMerge(false, false);
         waitTillMerge.get().await();
 
-        for (Segment segment : engine.segments()) {
+        for (Segment segment : engine.segments(false)) {
             assertThat(segment.getMergeId(), notNullValue());
         }
 
@@ -453,7 +483,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         // now, optimize and wait for merges, see that we have no merge flag
         engine.forceMerge(true, true);
 
-        for (Segment segment : engine.segments()) {
+        for (Segment segment : engine.segments(false)) {
             assertThat(segment.getMergeId(), nullValue());
         }
         // we could have multiple underlying merges, so the generation may increase more than once
@@ -463,7 +493,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         final long gen2 = store.readLastCommittedSegmentsInfo().getGeneration();
         engine.forceMerge(flush, false);
         waitTillMerge.get().await();
-        for (Segment segment : engine.segments()) {
+        for (Segment segment : engine.segments(false)) {
             assertThat(segment.getMergeId(), nullValue());
         }
         waitForMerge.get().countDown();