1 package org.apache.lucene.index;
4 * Licensed to the Apache Software Foundation (ASF) under one or more
5 * contributor license agreements. See the NOTICE file distributed with
6 * this work for additional information regarding copyright ownership.
7 * The ASF licenses this file to You under the Apache License, Version 2.0
8 * (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.SortedSet;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.document.SetBasedFieldSelector;
import org.apache.lucene.index.IndexReader.FieldOption;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.LockReleaseFailedException;
import org.apache.lucene.store.NoSuchDirectoryException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
52 public class TestIndexReader extends LuceneTestCase {
54 public void testCommitUserData() throws Exception {
55 Directory d = newDirectory();
57 Map<String,String> commitUserData = new HashMap<String,String>();
58 commitUserData.put("foo", "fighters");
61 IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
62 TEST_VERSION_CURRENT, new MockAnalyzer(random))
63 .setMaxBufferedDocs(2));
65 addDocumentWithFields(writer);
68 IndexReader r = IndexReader.open(d, false);
70 r.flush(commitUserData);
71 IndexCommit c = r.getIndexCommit();
74 SegmentInfos sis = new SegmentInfos();
76 IndexReader r2 = IndexReader.open(d, false);
77 assertEquals(c.getUserData(), commitUserData);
79 assertEquals(sis.getCurrentSegmentFileName(), c.getSegmentsFileName());
82 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
83 new MockAnalyzer(random)).setOpenMode(
84 OpenMode.APPEND).setMaxBufferedDocs(2));
86 addDocumentWithFields(writer);
89 IndexReader r3 = IndexReader.openIfChanged(r2);
91 assertFalse(c.equals(r3.getIndexCommit()));
92 assertFalse(r2.getIndexCommit().getSegmentCount() == 1 && !r2.hasDeletions());
95 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
96 new MockAnalyzer(random))
97 .setOpenMode(OpenMode.APPEND));
101 r3 = IndexReader.openIfChanged(r2);
103 assertEquals(1, r3.getIndexCommit().getSegmentCount());
109 public void testIsCurrent() throws Exception {
110 Directory d = newDirectory();
111 IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
112 TEST_VERSION_CURRENT, new MockAnalyzer(random)));
113 addDocumentWithFields(writer);
116 IndexReader reader = IndexReader.open(d, false);
117 assertTrue(reader.isCurrent());
118 // modify index by adding another document:
119 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
120 new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
121 addDocumentWithFields(writer);
123 assertFalse(reader.isCurrent());
125 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
126 new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
127 addDocumentWithFields(writer);
129 assertFalse(reader.isCurrent());
135 * Tests the IndexReader.getFieldNames implementation
136 * @throws Exception on error
138 public void testGetFieldNames() throws Exception {
139 Directory d = newDirectory();
141 IndexWriter writer = new IndexWriter(
143 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
146 Document doc = new Document();
147 doc.add(new Field("keyword","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
148 doc.add(new Field("text","test1", Field.Store.YES, Field.Index.ANALYZED));
149 doc.add(new Field("unindexed","test1", Field.Store.YES, Field.Index.NO));
150 doc.add(new Field("unstored","test1", Field.Store.NO, Field.Index.ANALYZED));
151 writer.addDocument(doc);
155 IndexReader reader = IndexReader.open(d, false);
156 Collection<String> fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
157 assertTrue(fieldNames.contains("keyword"));
158 assertTrue(fieldNames.contains("text"));
159 assertTrue(fieldNames.contains("unindexed"));
160 assertTrue(fieldNames.contains("unstored"));
162 // add more documents
163 writer = new IndexWriter(
165 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
166 setOpenMode(OpenMode.APPEND).
167 setMergePolicy(newLogMergePolicy())
169 // want to get some more segments here
170 int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
171 for (int i = 0; i < 5*mergeFactor; i++) {
172 doc = new Document();
173 doc.add(new Field("keyword","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
174 doc.add(new Field("text","test1", Field.Store.YES, Field.Index.ANALYZED));
175 doc.add(new Field("unindexed","test1", Field.Store.YES, Field.Index.NO));
176 doc.add(new Field("unstored","test1", Field.Store.NO, Field.Index.ANALYZED));
177 writer.addDocument(doc);
179 // new fields are in some different segments (we hope)
180 for (int i = 0; i < 5*mergeFactor; i++) {
181 doc = new Document();
182 doc.add(new Field("keyword2","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
183 doc.add(new Field("text2","test1", Field.Store.YES, Field.Index.ANALYZED));
184 doc.add(new Field("unindexed2","test1", Field.Store.YES, Field.Index.NO));
185 doc.add(new Field("unstored2","test1", Field.Store.NO, Field.Index.ANALYZED));
186 writer.addDocument(doc);
188 // new termvector fields
189 for (int i = 0; i < 5*mergeFactor; i++) {
190 doc = new Document();
191 doc.add(new Field("tvnot","tvnot", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
192 doc.add(new Field("termvector","termvector", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
193 doc.add(new Field("tvoffset","tvoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
194 doc.add(new Field("tvposition","tvposition", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
195 doc.add(newField("tvpositionoffset","tvpositionoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
196 writer.addDocument(doc);
200 // verify fields again
201 reader = IndexReader.open(d, false);
202 fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
203 assertEquals(13, fieldNames.size()); // the following fields
204 assertTrue(fieldNames.contains("keyword"));
205 assertTrue(fieldNames.contains("text"));
206 assertTrue(fieldNames.contains("unindexed"));
207 assertTrue(fieldNames.contains("unstored"));
208 assertTrue(fieldNames.contains("keyword2"));
209 assertTrue(fieldNames.contains("text2"));
210 assertTrue(fieldNames.contains("unindexed2"));
211 assertTrue(fieldNames.contains("unstored2"));
212 assertTrue(fieldNames.contains("tvnot"));
213 assertTrue(fieldNames.contains("termvector"));
214 assertTrue(fieldNames.contains("tvposition"));
215 assertTrue(fieldNames.contains("tvoffset"));
216 assertTrue(fieldNames.contains("tvpositionoffset"));
218 // verify that only indexed fields were returned
219 fieldNames = reader.getFieldNames(IndexReader.FieldOption.INDEXED);
220 assertEquals(11, fieldNames.size()); // 6 original + the 5 termvector fields
221 assertTrue(fieldNames.contains("keyword"));
222 assertTrue(fieldNames.contains("text"));
223 assertTrue(fieldNames.contains("unstored"));
224 assertTrue(fieldNames.contains("keyword2"));
225 assertTrue(fieldNames.contains("text2"));
226 assertTrue(fieldNames.contains("unstored2"));
227 assertTrue(fieldNames.contains("tvnot"));
228 assertTrue(fieldNames.contains("termvector"));
229 assertTrue(fieldNames.contains("tvposition"));
230 assertTrue(fieldNames.contains("tvoffset"));
231 assertTrue(fieldNames.contains("tvpositionoffset"));
233 // verify that only unindexed fields were returned
234 fieldNames = reader.getFieldNames(IndexReader.FieldOption.UNINDEXED);
235 assertEquals(2, fieldNames.size()); // the following fields
236 assertTrue(fieldNames.contains("unindexed"));
237 assertTrue(fieldNames.contains("unindexed2"));
239 // verify index term vector fields
240 fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR);
241 assertEquals(1, fieldNames.size()); // 1 field has term vector only
242 assertTrue(fieldNames.contains("termvector"));
244 fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION);
245 assertEquals(1, fieldNames.size()); // 4 fields are indexed with term vectors
246 assertTrue(fieldNames.contains("tvposition"));
248 fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET);
249 assertEquals(1, fieldNames.size()); // 4 fields are indexed with term vectors
250 assertTrue(fieldNames.contains("tvoffset"));
252 fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET);
253 assertEquals(1, fieldNames.size()); // 4 fields are indexed with term vectors
254 assertTrue(fieldNames.contains("tvpositionoffset"));
259 public void testTermVectors() throws Exception {
260 Directory d = newDirectory();
262 IndexWriter writer = new IndexWriter(
264 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
265 setMergePolicy(newLogMergePolicy())
267 // want to get some more segments here
268 // new termvector fields
269 int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
270 for (int i = 0; i < 5 * mergeFactor; i++) {
271 Document doc = new Document();
272 doc.add(new Field("tvnot","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
273 doc.add(new Field("termvector","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
274 doc.add(new Field("tvoffset","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
275 doc.add(new Field("tvposition","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
276 doc.add(new Field("tvpositionoffset","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
278 writer.addDocument(doc);
281 IndexReader reader = IndexReader.open(d, false);
282 FieldSortedTermVectorMapper mapper = new FieldSortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
283 reader.getTermFreqVector(0, mapper);
284 Map<String,SortedSet<TermVectorEntry>> map = mapper.getFieldToTerms();
285 assertTrue("map is null and it shouldn't be", map != null);
286 assertTrue("map Size: " + map.size() + " is not: " + 4, map.size() == 4);
287 Set<TermVectorEntry> set = map.get("termvector");
288 for (Iterator<TermVectorEntry> iterator = set.iterator(); iterator.hasNext();) {
289 TermVectorEntry entry = iterator.next();
290 assertTrue("entry is null and it shouldn't be", entry != null);
291 if (VERBOSE) System.out.println("Entry: " + entry);
297 static void assertTermDocsCount(String msg,
303 TermDocs tdocs = null;
306 tdocs = reader.termDocs(term);
307 assertNotNull(msg + ", null TermDocs", tdocs);
309 while(tdocs.next()) {
312 assertEquals(msg + ", count mismatch", expected, count);
322 public void testBinaryFields() throws IOException {
323 Directory dir = newDirectory();
324 byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
326 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
328 for (int i = 0; i < 10; i++) {
329 addDoc(writer, "document number " + (i + 1));
330 addDocumentWithFields(writer);
331 addDocumentWithDifferentFields(writer);
332 addDocumentWithTermVectorFields(writer);
335 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
336 Document doc = new Document();
337 doc.add(new Field("bin1", bin));
338 doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
339 writer.addDocument(doc);
341 IndexReader reader = IndexReader.open(dir, false);
342 doc = reader.document(reader.maxDoc() - 1);
343 Field[] fields = doc.getFields("bin1");
344 assertNotNull(fields);
345 assertEquals(1, fields.length);
346 Field b1 = fields[0];
347 assertTrue(b1.isBinary());
348 byte[] data1 = b1.getBinaryValue();
349 assertEquals(bin.length, b1.getBinaryLength());
350 for (int i = 0; i < bin.length; i++) {
351 assertEquals(bin[i], data1[i + b1.getBinaryOffset()]);
353 Set<String> lazyFields = new HashSet<String>();
354 lazyFields.add("bin1");
355 FieldSelector sel = new SetBasedFieldSelector(new HashSet<String>(), lazyFields);
356 doc = reader.document(reader.maxDoc() - 1, sel);
357 Fieldable[] fieldables = doc.getFieldables("bin1");
358 assertNotNull(fieldables);
359 assertEquals(1, fieldables.length);
360 Fieldable fb1 = fieldables[0];
361 assertTrue(fb1.isBinary());
362 assertEquals(bin.length, fb1.getBinaryLength());
363 data1 = fb1.getBinaryValue();
364 assertEquals(bin.length, fb1.getBinaryLength());
365 for (int i = 0; i < bin.length; i++) {
366 assertEquals(bin[i], data1[i + fb1.getBinaryOffset()]);
372 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
373 writer.forceMerge(1);
375 reader = IndexReader.open(dir, false);
376 doc = reader.document(reader.maxDoc() - 1);
377 fields = doc.getFields("bin1");
378 assertNotNull(fields);
379 assertEquals(1, fields.length);
381 assertTrue(b1.isBinary());
382 data1 = b1.getBinaryValue();
383 assertEquals(bin.length, b1.getBinaryLength());
384 for (int i = 0; i < bin.length; i++) {
385 assertEquals(bin[i], data1[i + b1.getBinaryOffset()]);
391 // Make sure attempts to make changes after reader is
392 // closed throws IOException:
393 public void testChangesAfterClose() throws IOException {
394 Directory dir = newDirectory();
396 IndexWriter writer = null;
397 IndexReader reader = null;
398 Term searchTerm = new Term("content", "aaa");
400 // add 11 documents with term : aaa
401 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
402 for (int i = 0; i < 11; i++) {
403 addDoc(writer, searchTerm.text());
407 reader = IndexReader.open(dir, false);
412 // Then, try to make changes:
414 reader.deleteDocument(4);
415 fail("deleteDocument after close failed to throw IOException");
416 } catch (AlreadyClosedException e) {
421 reader.setNorm(5, "aaa", 2.0f);
422 fail("setNorm after close failed to throw IOException");
423 } catch (AlreadyClosedException e) {
428 reader.undeleteAll();
429 fail("undeleteAll after close failed to throw IOException");
430 } catch (AlreadyClosedException e) {
436 // Make sure we get lock obtain failed exception with 2 writers:
437 public void testLockObtainFailed() throws IOException {
438 Directory dir = newDirectory();
440 Term searchTerm = new Term("content", "aaa");
442 // add 11 documents with term : aaa
443 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
445 for (int i = 0; i < 11; i++) {
446 addDoc(writer, searchTerm.text());
450 IndexReader reader = IndexReader.open(dir, false);
452 // Try to make changes
454 reader.deleteDocument(4);
455 fail("deleteDocument should have hit LockObtainFailedException");
456 } catch (LockObtainFailedException e) {
461 reader.setNorm(5, "aaa", 2.0f);
462 fail("setNorm should have hit LockObtainFailedException");
463 } catch (LockObtainFailedException e) {
468 reader.undeleteAll();
469 fail("undeleteAll should have hit LockObtainFailedException");
470 } catch (LockObtainFailedException e) {
478 // Make sure you can set norms & commit even if a reader
479 // is open against the index:
480 public void testWritingNorms() throws IOException {
481 Directory dir = newDirectory();
484 Term searchTerm = new Term("content", "aaa");
486 // add 1 documents with term : aaa
487 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
488 addDoc(writer, searchTerm.text());
491 // now open reader & set norm for doc 0
492 reader = IndexReader.open(dir, false);
493 reader.setNorm(0, "content", (float) 2.0);
495 // we should be holding the write lock now:
496 assertTrue("locked", IndexWriter.isLocked(dir));
500 // we should not be holding the write lock now:
501 assertTrue("not locked", !IndexWriter.isLocked(dir));
503 // open a 2nd reader:
504 IndexReader reader2 = IndexReader.open(dir, false);
506 // set norm again for doc 0
507 reader.setNorm(0, "content", (float) 3.0);
508 assertTrue("locked", IndexWriter.isLocked(dir));
512 // we should not be holding the write lock now:
513 assertTrue("not locked", !IndexWriter.isLocked(dir));
520 // Make sure you can set norms & commit, and there are
521 // no extra norms files left:
522 public void testWritingNormsNoReader() throws IOException {
523 Directory dir = newDirectory();
524 IndexWriter writer = null;
525 IndexReader reader = null;
526 Term searchTerm = new Term("content", "aaa");
528 // add 1 documents with term : aaa
529 writer = new IndexWriter(
531 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
532 setMergePolicy(newLogMergePolicy(false))
534 addDoc(writer, searchTerm.text());
537 // now open reader & set norm for doc 0 (writes to
539 reader = IndexReader.open(dir, false);
540 reader.setNorm(0, "content", (float) 2.0);
543 // now open reader again & set norm for doc 0 (writes to _0_2.s0)
544 reader = IndexReader.open(dir, false);
545 reader.setNorm(0, "content", (float) 2.0);
547 assertFalse("failed to remove first generation norms file on writing second generation",
548 dir.fileExists("_0_1.s0"));
553 /* ??? public void testOpenEmptyDirectory() throws IOException{
554 String dirName = "test.empty";
555 File fileDirName = new File(dirName);
556 if (!fileDirName.exists()) {
560 IndexReader.open(fileDirName);
561 fail("opening IndexReader on empty directory failed to produce FileNotFoundException");
562 } catch (FileNotFoundException e) {
568 public void testFilesOpenClose() throws IOException {
569 // Create initial data set
570 File dirFile = _TestUtil.getTempDir("TestIndexReader.testFilesOpenClose");
571 Directory dir = newFSDirectory(dirFile);
572 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
573 addDoc(writer, "test");
577 // Try to erase the data - this ensures that the writer closed all files
578 _TestUtil.rmDir(dirFile);
579 dir = newFSDirectory(dirFile);
581 // Now create the data set again, just as before
582 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
583 addDoc(writer, "test");
587 // Now open existing directory and test that reader closes all files
588 dir = newFSDirectory(dirFile);
589 IndexReader reader1 = IndexReader.open(dir, false);
593 // The following will fail if reader did not close
595 _TestUtil.rmDir(dirFile);
598 public void testLastModified() throws Exception {
599 for(int i=0;i<2;i++) {
600 final Directory dir = newDirectory();
601 assertFalse(IndexReader.indexExists(dir));
602 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
603 addDocumentWithFields(writer);
604 assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
606 assertTrue(IndexReader.indexExists(dir));
607 IndexReader reader = IndexReader.open(dir, false);
608 assertFalse(IndexWriter.isLocked(dir)); // reader only, no lock
609 long version = IndexReader.lastModified(dir);
611 long version2 = IndexReader.lastModified(dir);
612 assertEquals(version, version2);
615 // modify index and check version has been
619 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
620 addDocumentWithFields(writer);
622 reader = IndexReader.open(dir, false);
623 assertTrue("old lastModified is " + version + "; new lastModified is " + IndexReader.lastModified(dir), version <= IndexReader.lastModified(dir));
629 public void testVersion() throws IOException {
630 Directory dir = newDirectory();
631 assertFalse(IndexReader.indexExists(dir));
632 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
633 addDocumentWithFields(writer);
634 assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
636 assertTrue(IndexReader.indexExists(dir));
637 IndexReader reader = IndexReader.open(dir, false);
638 assertFalse(IndexWriter.isLocked(dir)); // reader only, no lock
639 long version = IndexReader.getCurrentVersion(dir);
641 // modify index and check version has been
643 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
644 addDocumentWithFields(writer);
646 reader = IndexReader.open(dir, false);
647 assertTrue("old version is " + version + "; new version is " + IndexReader.getCurrentVersion(dir), version < IndexReader.getCurrentVersion(dir));
652 public void testLock() throws IOException {
653 Directory dir = newDirectory();
654 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
655 addDocumentWithFields(writer);
657 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
658 IndexReader reader = IndexReader.open(dir, false);
660 reader.deleteDocument(0);
661 fail("expected lock");
662 } catch(IOException e) {
663 // expected exception
666 IndexWriter.unlock(dir); // this should not be done in the real world!
667 } catch (LockReleaseFailedException lrfe) {
670 reader.deleteDocument(0);
676 public void testDocsOutOfOrderJIRA140() throws IOException {
677 Directory dir = newDirectory();
678 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
679 for(int i=0;i<11;i++) {
680 addDoc(writer, "aaa");
683 IndexReader reader = IndexReader.open(dir, false);
685 // Try to delete an invalid docId, yet, within range
686 // of the final bits of the BitVector:
688 boolean gotException = false;
690 reader.deleteDocument(11);
691 } catch (ArrayIndexOutOfBoundsException e) {
696 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
698 // We must add more docs to get a new segment written
699 for(int i=0;i<11;i++) {
700 addDoc(writer, "aaa");
703 // Without the fix for LUCENE-140 this call will
704 // [incorrectly] hit a "docs out of order"
705 // IllegalStateException because above out-of-bounds
706 // deleteDocument corrupted the index:
707 writer.forceMerge(1);
710 fail("delete of out-of-bounds doc number failed to hit exception");
715 public void testExceptionReleaseWriteLockJIRA768() throws IOException {
717 Directory dir = newDirectory();
718 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
719 addDoc(writer, "aaa");
722 IndexReader reader = IndexReader.open(dir, false);
724 reader.deleteDocument(1);
725 fail("did not hit exception when deleting an invalid doc number");
726 } catch (ArrayIndexOutOfBoundsException e) {
730 if (IndexWriter.isLocked(dir)) {
731 fail("write lock is still held after close");
734 reader = IndexReader.open(dir, false);
736 reader.setNorm(1, "content", (float) 2.0);
737 fail("did not hit exception when calling setNorm on an invalid doc number");
738 } catch (ArrayIndexOutOfBoundsException e) {
742 if (IndexWriter.isLocked(dir)) {
743 fail("write lock is still held after close");
748 private String arrayToString(String[] l) {
750 for(int i=0;i<l.length;i++) {
759 public void testOpenReaderAfterDelete() throws IOException {
760 File dirFile = _TestUtil.getTempDir("deletetest");
761 Directory dir = newFSDirectory(dirFile);
763 IndexReader.open(dir, false);
764 fail("expected FileNotFoundException");
765 } catch (FileNotFoundException e) {
771 // Make sure we still get a CorruptIndexException (not NPE):
773 IndexReader.open(dir, false);
774 fail("expected FileNotFoundException");
775 } catch (FileNotFoundException e) {
782 static void addDocumentWithFields(IndexWriter writer) throws IOException
784 Document doc = new Document();
785 doc.add(newField("keyword","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
786 doc.add(newField("text","test1", Field.Store.YES, Field.Index.ANALYZED));
787 doc.add(newField("unindexed","test1", Field.Store.YES, Field.Index.NO));
788 doc.add(newField("unstored","test1", Field.Store.NO, Field.Index.ANALYZED));
789 writer.addDocument(doc);
792 static void addDocumentWithDifferentFields(IndexWriter writer) throws IOException
794 Document doc = new Document();
795 doc.add(newField("keyword2","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
796 doc.add(newField("text2","test1", Field.Store.YES, Field.Index.ANALYZED));
797 doc.add(newField("unindexed2","test1", Field.Store.YES, Field.Index.NO));
798 doc.add(newField("unstored2","test1", Field.Store.NO, Field.Index.ANALYZED));
799 writer.addDocument(doc);
802 static void addDocumentWithTermVectorFields(IndexWriter writer) throws IOException
804 Document doc = new Document();
805 doc.add(newField("tvnot","tvnot", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
806 doc.add(newField("termvector","termvector", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
807 doc.add(newField("tvoffset","tvoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
808 doc.add(newField("tvposition","tvposition", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
809 doc.add(newField("tvpositionoffset","tvpositionoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
811 writer.addDocument(doc);
814 static void addDoc(IndexWriter writer, String value) throws IOException {
815 Document doc = new Document();
816 doc.add(newField("content", value, Field.Store.NO, Field.Index.ANALYZED));
817 writer.addDocument(doc);
820 public static void assertIndexEquals(IndexReader index1, IndexReader index2) throws IOException {
821 assertEquals("IndexReaders have different values for numDocs.", index1.numDocs(), index2.numDocs());
822 assertEquals("IndexReaders have different values for maxDoc.", index1.maxDoc(), index2.maxDoc());
823 assertEquals("Only one IndexReader has deletions.", index1.hasDeletions(), index2.hasDeletions());
824 if (!(index1 instanceof ParallelReader)) {
825 assertEquals("Single segment test differs.", index1.getSequentialSubReaders().length == 1, index2.getSequentialSubReaders().length == 1);
829 Collection<String> fields1 = index1.getFieldNames(FieldOption.ALL);
830 Collection<String> fields2 = index1.getFieldNames(FieldOption.ALL);
831 assertEquals("IndexReaders have different numbers of fields.", fields1.size(), fields2.size());
832 Iterator<String> it1 = fields1.iterator();
833 Iterator<String> it2 = fields1.iterator();
834 while (it1.hasNext()) {
835 assertEquals("Different field names.", it1.next(), it2.next());
839 it1 = fields1.iterator();
840 while (it1.hasNext()) {
841 String curField = it1.next();
842 byte[] norms1 = index1.norms(curField);
843 byte[] norms2 = index2.norms(curField);
844 if (norms1 != null && norms2 != null)
846 assertEquals(norms1.length, norms2.length);
847 for (int i = 0; i < norms1.length; i++) {
848 assertEquals("Norm different for doc " + i + " and field '" + curField + "'.", norms1[i], norms2[i]);
853 assertSame(norms1, norms2);
858 for (int i = 0; i < index1.maxDoc(); i++) {
859 assertEquals("Doc " + i + " only deleted in one index.", index1.isDeleted(i), index2.isDeleted(i));
862 // check stored fields
863 for (int i = 0; i < index1.maxDoc(); i++) {
864 if (!index1.isDeleted(i)) {
865 Document doc1 = index1.document(i);
866 Document doc2 = index2.document(i);
867 List<Fieldable> fieldable1 = doc1.getFields();
868 List<Fieldable> fieldable2 = doc2.getFields();
869 assertEquals("Different numbers of fields for doc " + i + ".", fieldable1.size(), fieldable2.size());
870 Iterator<Fieldable> itField1 = fieldable1.iterator();
871 Iterator<Fieldable> itField2 = fieldable2.iterator();
872 while (itField1.hasNext()) {
873 Field curField1 = (Field) itField1.next();
874 Field curField2 = (Field) itField2.next();
875 assertEquals("Different fields names for doc " + i + ".", curField1.name(), curField2.name());
876 assertEquals("Different field values for doc " + i + ".", curField1.stringValue(), curField2.stringValue());
881 // check dictionary and posting lists
882 TermEnum enum1 = index1.terms();
883 TermEnum enum2 = index2.terms();
884 TermPositions tp1 = index1.termPositions();
885 TermPositions tp2 = index2.termPositions();
886 while(enum1.next()) {
887 assertTrue(enum2.next());
888 assertEquals("Different term in dictionary.", enum1.term(), enum2.term());
889 tp1.seek(enum1.term());
890 tp2.seek(enum1.term());
892 assertTrue(tp2.next());
893 assertEquals("Different doc id in postinglist of term " + enum1.term() + ".", tp1.doc(), tp2.doc());
894 assertEquals("Different term frequence in postinglist of term " + enum1.term() + ".", tp1.freq(), tp2.freq());
895 for (int i = 0; i < tp1.freq(); i++) {
896 assertEquals("Different positions in postinglist of term " + enum1.term() + ".", tp1.nextPosition(), tp2.nextPosition());
902 public void testGetIndexCommit() throws IOException {
904 Directory d = newDirectory();
907 IndexWriter writer = new IndexWriter(
909 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
910 setMaxBufferedDocs(2).
911 setMergePolicy(newLogMergePolicy(10))
913 for(int i=0;i<27;i++)
914 addDocumentWithFields(writer);
917 SegmentInfos sis = new SegmentInfos();
919 IndexReader r = IndexReader.open(d, false);
920 IndexCommit c = r.getIndexCommit();
922 assertEquals(sis.getCurrentSegmentFileName(), c.getSegmentsFileName());
924 assertTrue(c.equals(r.getIndexCommit()));
927 writer = new IndexWriter(
929 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
930 setOpenMode(OpenMode.APPEND).
931 setMaxBufferedDocs(2).
932 setMergePolicy(newLogMergePolicy(10))
935 addDocumentWithFields(writer);
938 IndexReader r2 = IndexReader.openIfChanged(r);
940 assertFalse(c.equals(r2.getIndexCommit()));
941 assertFalse(r2.getIndexCommit().getSegmentCount() == 1);
944 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
945 new MockAnalyzer(random))
946 .setOpenMode(OpenMode.APPEND));
947 writer.forceMerge(1);
950 r2 = IndexReader.openIfChanged(r);
952 assertNull(IndexReader.openIfChanged(r2));
953 assertEquals(1, r2.getIndexCommit().getSegmentCount());
// Verifies that a read-only IndexReader (open(d, true)) rejects mutation with
// UnsupportedOperationException, that readers obtained via openIfChanged from
// a read-only reader remain read-only, and that no write lock is left held.
// NOTE(review): embedded original line numbers show gaps — try-block openers,
// fail(...) calls and close() calls are elided from this view.
960 public void testReadOnly() throws Throwable {
961 Directory d = newDirectory();
962 IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
963 TEST_VERSION_CURRENT, new MockAnalyzer(random)));
964 addDocumentWithFields(writer);
966 addDocumentWithFields(writer);
// Open read-only; a (now elided) deleteDocument attempt is expected to throw.
969 IndexReader r = IndexReader.open(d, true);
973 } catch (UnsupportedOperationException uoe) {
// Append another document so the index changes under the open reader.
977 writer = new IndexWriter(
979 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
980 setOpenMode(OpenMode.APPEND).
981 setMergePolicy(newLogMergePolicy(10))
983 addDocumentWithFields(writer);
986 // Make sure reopen is still readonly:
987 IndexReader r2 = IndexReader.openIfChanged(r);
// Index changed, so a distinct reader instance must be returned.
991 assertFalse(r == r2);
// Mutation on the reopened reader must also be rejected.
994 r2.deleteDocument(0);
996 } catch (UnsupportedOperationException uoe) {
// Merge down to a single segment to exercise the single-segment reopen path.
1000 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
1001 new MockAnalyzer(random))
1002 .setOpenMode(OpenMode.APPEND));
1003 writer.forceMerge(1);
1006 // Make sure reopen to a single segment is still readonly:
1007 IndexReader r3 = IndexReader.openIfChanged(r2);
1009 assertFalse(r3 == r2);
1012 assertFalse(r == r2);
// Even the single-segment read-only reader must reject deletes.
1015 r3.deleteDocument(0);
1017 } catch (UnsupportedOperationException uoe) {
1021 // Make sure write lock isn't held
1022 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
1023 new MockAnalyzer(random))
1024 .setOpenMode(OpenMode.APPEND));
// Smoke test: write three documents, delete two of them through a writable
// IndexReader, then confirm a read-only reader can still be opened and closed.
// NOTE(review): writer.close()/reader.close() calls appear elided in this view.
1033 public void testIndexReader() throws Exception {
1034 Directory dir = newDirectory();
1035 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
1036 TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1037 writer.addDocument(createDocument("a"));
1038 writer.addDocument(createDocument("b"));
1039 writer.addDocument(createDocument("c"));
// Writable (non-read-only) reader performs the deletes by Term.
1041 IndexReader reader = IndexReader.open(dir, false);
1042 reader.deleteDocuments(new Term("id", "a"));
1044 reader.deleteDocuments(new Term("id", "b"));
// Opening read-only afterwards must succeed.
1046 IndexReader.open(dir,true).close();
// Helper: builds a single-field Document whose "id" field is stored and
// indexed without analysis or norms, so it can be deleted by exact Term.
// NOTE(review): the return statement/closing brace are elided in this view.
1050 static Document createDocument(String id) {
1051 Document doc = new Document();
1052 doc.add(newField("id", id, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
1056 // LUCENE-1468 -- make sure on attempting to open an
1057 // IndexReader on a non-existent directory, you get a
// (comment continues on an elided line — presumably "good exception")
1059 public void testNoDir() throws Throwable {
1060 Directory dir = newFSDirectory(_TestUtil.getTempDir("doesnotexist"));
// Opening a reader over a directory that was never created must throw
// NoSuchDirectoryException rather than something opaque.
1062 IndexReader.open(dir, true);
1063 fail("did not hit expected exception");
1064 } catch (NoSuchDirectoryException nsde) {
// Verifies that IndexCommit.getFileNames() never lists the same file twice
// for any commit in the directory.
// NOTE(review): embedded line numbers show gaps — writer.close() and the
// seen.add(fileName) accumulation appear elided from this view.
1071 public void testNoDupCommitFileNames() throws Throwable {
1073 Directory dir = newDirectory();
// maxBufferedDocs=2 forces a flush mid-way, producing multiple segments.
1075 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
1076 TEST_VERSION_CURRENT, new MockAnalyzer(random))
1077 .setMaxBufferedDocs(2));
1078 writer.addDocument(createDocument("a"));
1079 writer.addDocument(createDocument("a"));
1080 writer.addDocument(createDocument("a"));
1083 Collection<IndexCommit> commits = IndexReader.listCommits(dir);
1084 for (final IndexCommit commit : commits) {
1085 Collection<String> files = commit.getFileNames();
// Track filenames per commit; a repeat means the commit listed a duplicate.
1086 HashSet<String> seen = new HashSet<String>();
1087 for (final String fileName : files) {
1088 assertTrue("file " + fileName + " was duplicated", !seen.contains(fileName));
1096 // LUCENE-1579: Ensure that on a cloned reader, segments
1097 // reuse the doc values arrays in FieldCache
1098 public void testFieldCacheReuseAfterClone() throws Exception {
1099 Directory dir = newDirectory();
1100 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1101 Document doc = new Document();
1102 doc.add(newField("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
1103 writer.addDocument(doc);
// NOTE(review): writer.close() appears elided here (line-number gap).
// Populate the FieldCache entry via the single-segment reader.
1107 IndexReader r = SegmentReader.getOnlySegmentReader(dir);
1108 final int[] ints = FieldCache.DEFAULT.getInts(r, "number");
1109 assertEquals(1, ints.length);
1110 assertEquals(17, ints[0]);
// Clone the reader; the cache lookup for the clone must hit the same entry.
1113 IndexReader r2 = (IndexReader) r.clone();
1115 assertTrue(r2 != r);
1116 final int[] ints2 = FieldCache.DEFAULT.getInts(r2, "number");
1119 assertEquals(1, ints2.length);
1120 assertEquals(17, ints2[0]);
// Identity (==), not just equality: the clone must reuse the same array.
1121 assertTrue(ints == ints2);
1126 // LUCENE-1579: Ensure that on a reopened reader, that any
1127 // shared segments reuse the doc values arrays in
// (comment continues on an elided line — presumably "FieldCache")
1129 public void testFieldCacheReuseAfterReopen() throws Exception {
1130 Directory dir = newDirectory();
1131 IndexWriter writer = new IndexWriter(
1133 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
1134 setMergePolicy(newLogMergePolicy(10))
1136 Document doc = new Document();
1137 doc.add(newField("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
1138 ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
1139 writer.addDocument(doc);
// NOTE(review): a writer.commit() likely sits in the elided gap here; confirm.
// Cache ints for the first (and only) segment.
1143 IndexReader r = IndexReader.open(dir, false);
1144 IndexReader r1 = SegmentReader.getOnlySegmentReader(r);
1145 final int[] ints = FieldCache.DEFAULT.getInts(r1, "number");
1146 assertEquals(1, ints.length);
1147 assertEquals(17, ints[0]);
// Add a second doc so reopen yields a new reader that shares segment 0.
1150 writer.addDocument(doc);
1153 // Reopen reader1 --> reader2
1154 IndexReader r2 = IndexReader.openIfChanged(r);
1157 IndexReader sub0 = r2.getSequentialSubReaders()[0];
1158 final int[] ints2 = FieldCache.DEFAULT.getInts(sub0, "number");
// Shared segment must reuse the exact same cached array (identity check).
1160 assertTrue(ints == ints2);
1166 // LUCENE-1586: getUniqueTermCount
1167 public void testUniqueTermCount() throws Exception {
1168 Directory dir = newDirectory();
1169 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1170 Document doc = new Document();
// 26 letter terms + 10 digit terms = 36 unique terms across both fields.
1171 doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
1172 doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
1173 writer.addDocument(doc);
1174 writer.addDocument(doc);
// Per-segment reader reports the unique term count directly.
1177 IndexReader r = IndexReader.open(dir, false);
1178 IndexReader r1 = SegmentReader.getOnlySegmentReader(r);
1179 assertEquals(36, r1.getUniqueTermCount());
// Add a third doc -> second segment; reopened top-level reader is now multi-
// segment, and getUniqueTermCount() on a composite reader must throw.
1180 writer.addDocument(doc);
1182 IndexReader r2 = IndexReader.openIfChanged(r);
1186 r2.getUniqueTermCount();
1187 fail("expected exception");
1188 } catch (UnsupportedOperationException uoe) {
// Each sub-reader (segment) still reports its own count of 36.
1191 IndexReader[] subs = r2.getSequentialSubReaders();
1192 for(int i=0;i<subs.length;i++) {
1193 assertEquals(36, subs[i].getUniqueTermCount());
1200 // LUCENE-1609: don't load terms index
1201 public void testNoTermsIndex() throws Throwable {
1202 Directory dir = newDirectory();
1203 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1204 Document doc = new Document();
1205 doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
1206 doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
1207 writer.addDocument(doc);
1208 writer.addDocument(doc);
// termInfosIndexDivisor == -1 disables loading the terms index entirely, so
// term-dictionary lookups (docFreq) must fail with IllegalStateException.
1211 IndexReader r = IndexReader.open(dir, null, true, -1);
1213 r.docFreq(new Term("field", "f"));
1214 fail("did not hit expected exception");
1215 } catch (IllegalStateException ise) {
// Confirm the segment really never loaded its terms index.
1218 assertFalse(((SegmentReader) r.getSequentialSubReaders()[0]).termsIndexLoaded());
1220 assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
// Add a second segment so the reopen produces two sub-readers.
1221 writer = new IndexWriter(
1223 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
1224 setMergePolicy(newLogMergePolicy(10))
1226 writer.addDocument(doc);
1229 // LUCENE-1718: ensure re-open carries over no terms index:
1230 IndexReader r2 = IndexReader.openIfChanged(r);
1232 assertNull(IndexReader.openIfChanged(r2));
1234 IndexReader[] subReaders = r2.getSequentialSubReaders();
1235 assertEquals(2, subReaders.length);
// Both the carried-over and the newly opened segment must skip the index.
1236 for(int i=0;i<2;i++) {
1237 assertFalse(((SegmentReader) subReaders[i]).termsIndexLoaded());
// Verifies that IndexWriter.prepareCommit() alone does NOT make an open
// reader stale — only the completed commit does.
// NOTE(review): the writer.commit() that eventually flips isCurrent() to
// false appears elided from this view (line-number gap before line 1259).
1244 public void testPrepareCommitIsCurrent() throws Throwable {
1245 Directory dir = newDirectory();
1246 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
1247 TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1249 Document doc = new Document();
1250 writer.addDocument(doc);
1251 IndexReader r = IndexReader.open(dir, true);
1252 assertTrue(r.isCurrent());
1253 writer.addDocument(doc);
// prepareCommit is only phase one of the two-phase commit: the reader must
// still be current because nothing is visible yet.
1254 writer.prepareCommit();
1255 assertTrue(r.isCurrent());
1256 IndexReader r2 = IndexReader.openIfChanged(r);
// After the (elided) finishing commit, the original reader is stale.
1259 assertFalse(r.isCurrent());
// Verifies IndexReader.listCommits(dir) returns commits in strictly
// increasing generation order when multiple commits are retained via
// SnapshotDeletionPolicy.
// NOTE(review): commit()/snapshot() calls between the addDocument calls
// appear elided from this view (line-number gaps).
1266 public void testListCommits() throws Exception {
1267 Directory dir = newDirectory();
// Wrap the default policy so older commit points survive for listing.
1268 SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
1269 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
1270 TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(sdp));
1271 writer.addDocument(new Document());
1274 writer.addDocument(new Document());
1277 writer.addDocument(new Document());
// Generations must be strictly ascending across the listed commits.
1281 long currentGen = 0;
1282 for (IndexCommit ic : IndexReader.listCommits(dir)) {
1283 assertTrue("currentGen=" + currentGen + " commitGen=" + ic.getGeneration(), currentGen < ic.getGeneration());
1284 currentGen = ic.getGeneration();
// Verifies IndexReader.indexExists(): after prepareCommit() alone the index
// is not yet visible; it exists only once the commit completes.
// NOTE(review): the writer.commit() between the two assertions appears
// elided from this view (line-number gap between 1295 and 1297).
1290 public void testIndexExists() throws Exception {
1291 Directory dir = newDirectory();
1292 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1293 writer.addDocument(new Document());
1294 writer.prepareCommit();
1295 assertFalse(IndexReader.indexExists(dir));
1297 assertTrue(IndexReader.indexExists(dir));
// Verifies ReaderFinishedListener callbacks: closing an NRT reader fires once
// for the top-level reader, then (after further closes, elided here) for its
// sub-readers; a directory reader fires per-segment on close.
// NOTE(review): listener body, close() calls and closeCount increments are
// elided in this view — the assertions below imply the listener bumps
// closeCount[0]; confirm against the full source.
1302 public void testReaderFinishedListener() throws Exception {
1303 Directory dir = newDirectory();
1304 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
1305 ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
1306 writer.setInfoStream(VERBOSE ? System.out : null);
1307 writer.addDocument(new Document());
1309 writer.addDocument(new Document());
1311 final IndexReader reader = writer.getReader();
// Shared mutable counter; single-element array works around the final-only
// capture rule for anonymous classes in this Java version.
1312 final int[] closeCount = new int[1];
1313 final IndexReader.ReaderFinishedListener listener = new IndexReader.ReaderFinishedListener() {
1314 public void finished(IndexReader reader) {
1319 reader.addReaderFinishedListener(listener);
1323 // Just the top reader
1324 assertEquals(1, closeCount[0]);
1327 // Now also the subs
1328 assertEquals(3, closeCount[0]);
// Fresh directory reader: its close must also notify the same listener.
1330 IndexReader reader2 = IndexReader.open(dir);
1331 reader2.addReaderFinishedListener(listener);
1335 assertEquals(3, closeCount[0]);
// Verifies that asking a reader for an out-of-bounds document id raises
// IllegalArgumentException rather than failing silently.
// NOTE(review): the offending r.document(...) call and the try opener are
// elided from this view (line-number gap before fail()).
1339 public void testOOBDocID() throws Exception {
1340 Directory dir = newDirectory();
1341 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1342 writer.addDocument(new Document());
1343 IndexReader r = writer.getReader();
1348 fail("did not hit exception");
1349 } catch (IllegalArgumentException iae) {
// Verifies IndexReader.tryIncRef(): succeeds while the reader is live, and
// fails once the reader has been fully released.
// NOTE(review): the decRef()/close() calls between the two assertions are
// elided from this view (line-number gap between 1362 and 1365).
1356 public void testTryIncRef() throws CorruptIndexException, LockObtainFailedException, IOException {
1357 Directory dir = newDirectory();
1358 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1359 writer.addDocument(new Document());
1361 IndexReader r = IndexReader.open(dir);
1362 assertTrue(r.tryIncRef());
1365 assertFalse(r.tryIncRef());
// Concurrency stress for tryIncRef(): multiple IncThreads hammer the ref
// count while the main thread also increments; after the (elided) release
// and joins, no thread may have failed and tryIncRef() must return false.
// NOTE(review): thread starts, decRef/close, and join() calls are elided in
// this view (line-number gaps) — confirm against the full source.
1370 public void testStressTryIncRef() throws CorruptIndexException, LockObtainFailedException, IOException, InterruptedException {
1371 Directory dir = newDirectory();
1372 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1373 writer.addDocument(new Document());
1375 IndexReader r = IndexReader.open(dir);
// atLeast(2) scales the thread count with the test's randomized multiplier.
1376 int numThreads = atLeast(2);
1378 IncThread[] threads = new IncThread[numThreads];
1379 for (int i = 0; i < threads.length; i++) {
1380 threads[i] = new IncThread(r, random);
1385 assertTrue(r.tryIncRef());
// Each thread records any Throwable in its 'failed' field instead of dying.
1389 for (int i = 0; i < threads.length; i++) {
1391 assertNull(threads[i].failed);
// Reader fully released by now: further incRef attempts must fail.
1393 assertFalse(r.tryIncRef());
1398 static class IncThread extends Thread {
1399 final IndexReader toInc;
1400 final Random random;
1403 IncThread(IndexReader toInc, Random random) {
1405 this.random = random;
1411 while (toInc.tryIncRef()) {
1412 assertFalse(toInc.hasDeletions());
1415 assertFalse(toInc.tryIncRef());
1416 } catch (Throwable e) {