@@ -31,8 +31,11 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.LogByteSizeMergePolicy;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.join.BitSetProducer;
+import org.apache.lucene.store.BaseDirectoryWrapper;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.BitSet;
@@ -89,7 +92,8 @@ public class BitSetFilterCacheTests extends ESTestCase {
         writer.addDocument(document);
         writer.commit();
 
-        IndexReader reader = DirectoryReader.open(writer, false);
+        DirectoryReader reader = DirectoryReader.open(writer, false);
+        reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test"), 0));
         IndexSearcher searcher = new IndexSearcher(reader);
 
         BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer, new BitsetFilterCache.Listener() {
@@ -114,6 +118,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
         writer.forceMerge(1);
         reader.close();
         reader = DirectoryReader.open(writer, false);
+        reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test"), 0));
         searcher = new IndexSearcher(reader);
 
         assertThat(matchCount(filter, reader), equalTo(3));
@@ -139,7 +144,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
         writer.addDocument(document);
         writer.commit();
         final DirectoryReader writerReader = DirectoryReader.open(writer, false);
-        final IndexReader reader = randomBoolean() ? writerReader : ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", 0));
+        final IndexReader reader = ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", 0));
 
         final AtomicLong stats = new AtomicLong();
         final AtomicInteger onCacheCalls = new AtomicInteger();
@@ -192,4 +197,39 @@ public class BitSetFilterCacheTests extends ESTestCase {
         }
     }
 
+    public void testRejectOtherIndex() throws IOException {
+        BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer, new BitsetFilterCache.Listener() {
+            @Override
+            public void onCache(ShardId shardId, Accountable accountable) {
+
+            }
+
+            @Override
+            public void onRemoval(ShardId shardId, Accountable accountable) {
+
+            }
+        });
+
+        Directory dir = newDirectory();
+        IndexWriter writer = new IndexWriter(
+                dir,
+                newIndexWriterConfig()
+        );
+        writer.addDocument(new Document());
+        DirectoryReader reader = DirectoryReader.open(writer, true);
+        writer.close();
+        reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test2"), 0));
+
+        BitSetProducer producer = cache.getBitSetProducer(new MatchAllDocsQuery());
+
+        try {
+            producer.getBitSet(reader.leaves().get(0));
+            fail();
+        } catch (IllegalStateException expected) {
+            assertEquals("Trying to load bit set for index [test2] with cache of index [test]", expected.getMessage());
+        } finally {
+            IOUtils.close(reader, dir);
+        }
+    }
+
 }