// pylucene 3.5.0-3
// [pylucene.git] / lucene-java-3.5.0 / lucene / src / test / org / apache / lucene / index / TestIndexReader.java
1 package org.apache.lucene.index;
2
3 /**
4  * Licensed to the Apache Software Foundation (ASF) under one or more
5  * contributor license agreements.  See the NOTICE file distributed with
6  * this work for additional information regarding copyright ownership.
7  * The ASF licenses this file to You under the Apache License, Version 2.0
8  * (the "License"); you may not use this file except in compliance with
9  * the License.  You may obtain a copy of the License at
10  *
11  *     http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19
20
21 import java.io.File;
22 import java.io.FileNotFoundException;
23 import java.io.IOException;
24 import java.util.Collection;
25 import java.util.HashSet;
26 import java.util.Iterator;
27 import java.util.List;
28 import java.util.Map;
29 import java.util.HashMap;
30 import java.util.Random;
31 import java.util.Set;
32 import java.util.SortedSet;
33
34 import org.apache.lucene.analysis.MockAnalyzer;
35 import org.apache.lucene.analysis.WhitespaceAnalyzer;
36 import org.apache.lucene.document.Document;
37 import org.apache.lucene.document.Field;
38 import org.apache.lucene.document.FieldSelector;
39 import org.apache.lucene.document.Fieldable;
40 import org.apache.lucene.document.SetBasedFieldSelector;
41 import org.apache.lucene.index.IndexReader.FieldOption;
42 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
43 import org.apache.lucene.search.FieldCache;
44 import org.apache.lucene.store.AlreadyClosedException;
45 import org.apache.lucene.store.Directory;
46 import org.apache.lucene.store.LockObtainFailedException;
47 import org.apache.lucene.store.NoSuchDirectoryException;
48 import org.apache.lucene.store.LockReleaseFailedException;
49 import org.apache.lucene.util.LuceneTestCase;
50 import org.apache.lucene.util._TestUtil;
51
52 public class TestIndexReader extends LuceneTestCase {
53     
    /**
     * Verifies that user data supplied at commit time (via
     * IndexReader.flush(Map)) is stored with the commit point, can be read
     * back from IndexCommit.getUserData(), and that the commit's segments
     * file name agrees with what SegmentInfos reports.
     */
    public void testCommitUserData() throws Exception {
      Directory d = newDirectory();

      Map<String,String> commitUserData = new HashMap<String,String>();
      commitUserData.put("foo", "fighters");
      
      // set up writer
      IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
          TEST_VERSION_CURRENT, new MockAnalyzer(random))
      .setMaxBufferedDocs(2));
      for(int i=0;i<27;i++)
        addDocumentWithFields(writer);
      writer.close();
      
      // delete one doc, then commit with the user data attached
      IndexReader r = IndexReader.open(d, false);
      r.deleteDocument(5);
      r.flush(commitUserData);
      IndexCommit c = r.getIndexCommit();
      r.close();
      
      SegmentInfos sis = new SegmentInfos();
      sis.read(d);
      IndexReader r2 = IndexReader.open(d, false);
      // the user data must round-trip through the commit point
      assertEquals(c.getUserData(), commitUserData);

      assertEquals(sis.getCurrentSegmentFileName(), c.getSegmentsFileName());

      // Change the index
      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
          new MockAnalyzer(random)).setOpenMode(
              OpenMode.APPEND).setMaxBufferedDocs(2));
      for(int i=0;i<7;i++)
        addDocumentWithFields(writer);
      writer.close();

      // reopening r2 must now yield a different (newer) commit
      IndexReader r3 = IndexReader.openIfChanged(r2);
      assertNotNull(r3);
      assertFalse(c.equals(r3.getIndexCommit()));
      assertFalse(r2.getIndexCommit().getSegmentCount() == 1 && !r2.hasDeletions());
      r3.close();

      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
        new MockAnalyzer(random))
        .setOpenMode(OpenMode.APPEND));
      writer.forceMerge(1);
      writer.close();

      // after forceMerge(1) the newest commit has exactly one segment
      r3 = IndexReader.openIfChanged(r2);
      assertNotNull(r3);
      assertEquals(1, r3.getIndexCommit().getSegmentCount());
      r2.close();
      r3.close();
      d.close();
    }
108     
109     public void testIsCurrent() throws Exception {
110       Directory d = newDirectory();
111       IndexWriter writer = new IndexWriter(d, newIndexWriterConfig( 
112         TEST_VERSION_CURRENT, new MockAnalyzer(random)));
113       addDocumentWithFields(writer);
114       writer.close();
115       // set up reader:
116       IndexReader reader = IndexReader.open(d, false);
117       assertTrue(reader.isCurrent());
118       // modify index by adding another document:
119       writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
120           new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
121       addDocumentWithFields(writer);
122       writer.close();
123       assertFalse(reader.isCurrent());
124       // re-create index:
125       writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
126           new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
127       addDocumentWithFields(writer);
128       writer.close();
129       assertFalse(reader.isCurrent());
130       reader.close();
131       d.close();
132     }
133
    /**
     * Tests the IndexReader.getFieldNames implementation: names must be
     * reported correctly per FieldOption (ALL, INDEXED, UNINDEXED, and the
     * term-vector variants), even when fields live in different segments.
     * @throws Exception on error
     */
    public void testGetFieldNames() throws Exception {
        Directory d = newDirectory();
        // set up writer
        IndexWriter writer = new IndexWriter(
            d,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
        );

        // one doc with indexed/not-analyzed, indexed/analyzed, stored-only,
        // and unstored/analyzed fields
        Document doc = new Document();
        doc.add(new Field("keyword","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
        doc.add(new Field("text","test1", Field.Store.YES, Field.Index.ANALYZED));
        doc.add(new Field("unindexed","test1", Field.Store.YES, Field.Index.NO));
        doc.add(new Field("unstored","test1", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);

        writer.close();
        // set up reader
        IndexReader reader = IndexReader.open(d, false);
        Collection<String> fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
        assertTrue(fieldNames.contains("keyword"));
        assertTrue(fieldNames.contains("text"));
        assertTrue(fieldNames.contains("unindexed"));
        assertTrue(fieldNames.contains("unstored"));
        reader.close();
        // add more documents
        writer = new IndexWriter(
            d,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
                setOpenMode(OpenMode.APPEND).
                setMergePolicy(newLogMergePolicy())
        );
        // want to get some more segments here
        int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
        for (int i = 0; i < 5*mergeFactor; i++) {
          doc = new Document();
          doc.add(new Field("keyword","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
          doc.add(new Field("text","test1", Field.Store.YES, Field.Index.ANALYZED));
          doc.add(new Field("unindexed","test1", Field.Store.YES, Field.Index.NO));
          doc.add(new Field("unstored","test1", Field.Store.NO, Field.Index.ANALYZED));
          writer.addDocument(doc);
        }
        // new fields are in some different segments (we hope)
        for (int i = 0; i < 5*mergeFactor; i++) {
          doc = new Document();
          doc.add(new Field("keyword2","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
          doc.add(new Field("text2","test1", Field.Store.YES, Field.Index.ANALYZED));
          doc.add(new Field("unindexed2","test1", Field.Store.YES, Field.Index.NO));
          doc.add(new Field("unstored2","test1", Field.Store.NO, Field.Index.ANALYZED));
          writer.addDocument(doc);
        }
        // new termvector fields: one field per TermVector flavor
        for (int i = 0; i < 5*mergeFactor; i++) {
          doc = new Document();
          doc.add(new Field("tvnot","tvnot", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
          doc.add(new Field("termvector","termvector", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
          doc.add(new Field("tvoffset","tvoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
          doc.add(new Field("tvposition","tvposition", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
          doc.add(newField("tvpositionoffset","tvpositionoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
          writer.addDocument(doc);
        }
        
        writer.close();
        // verify fields again
        reader = IndexReader.open(d, false);
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
        assertEquals(13, fieldNames.size());    // the 13 distinct field names added above
        assertTrue(fieldNames.contains("keyword"));
        assertTrue(fieldNames.contains("text"));
        assertTrue(fieldNames.contains("unindexed"));
        assertTrue(fieldNames.contains("unstored"));
        assertTrue(fieldNames.contains("keyword2"));
        assertTrue(fieldNames.contains("text2"));
        assertTrue(fieldNames.contains("unindexed2"));
        assertTrue(fieldNames.contains("unstored2"));
        assertTrue(fieldNames.contains("tvnot"));
        assertTrue(fieldNames.contains("termvector"));
        assertTrue(fieldNames.contains("tvposition"));
        assertTrue(fieldNames.contains("tvoffset"));
        assertTrue(fieldNames.contains("tvpositionoffset"));
        
        // verify that only indexed fields were returned
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.INDEXED);
        assertEquals(11, fieldNames.size());    // 6 original indexed + the 5 termvector fields 
        assertTrue(fieldNames.contains("keyword"));
        assertTrue(fieldNames.contains("text"));
        assertTrue(fieldNames.contains("unstored"));
        assertTrue(fieldNames.contains("keyword2"));
        assertTrue(fieldNames.contains("text2"));
        assertTrue(fieldNames.contains("unstored2"));
        assertTrue(fieldNames.contains("tvnot"));
        assertTrue(fieldNames.contains("termvector"));
        assertTrue(fieldNames.contains("tvposition"));
        assertTrue(fieldNames.contains("tvoffset"));
        assertTrue(fieldNames.contains("tvpositionoffset"));
        
        // verify that only unindexed fields were returned
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.UNINDEXED);
        assertEquals(2, fieldNames.size());    // "unindexed" and "unindexed2"
        assertTrue(fieldNames.contains("unindexed"));
        assertTrue(fieldNames.contains("unindexed2"));
                
        // verify index term vector fields  
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR);
        assertEquals(1, fieldNames.size());    // only 1 field uses plain TermVector.YES
        assertTrue(fieldNames.contains("termvector"));
        
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION);
        assertEquals(1, fieldNames.size());    // only 1 field uses WITH_POSITIONS
        assertTrue(fieldNames.contains("tvposition"));
        
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET);
        assertEquals(1, fieldNames.size());    // only 1 field uses WITH_OFFSETS
        assertTrue(fieldNames.contains("tvoffset"));
                
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET);
        assertEquals(1, fieldNames.size());    // only 1 field uses WITH_POSITIONS_OFFSETS
        assertTrue(fieldNames.contains("tvpositionoffset"));
        reader.close();
        d.close();
    }
258
259   public void testTermVectors() throws Exception {
260     Directory d = newDirectory();
261     // set up writer
262     IndexWriter writer = new IndexWriter(
263         d,
264         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
265             setMergePolicy(newLogMergePolicy())
266     );
267     // want to get some more segments here
268     // new termvector fields
269     int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
270     for (int i = 0; i < 5 * mergeFactor; i++) {
271       Document doc = new Document();
272         doc.add(new Field("tvnot","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
273         doc.add(new Field("termvector","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
274         doc.add(new Field("tvoffset","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
275         doc.add(new Field("tvposition","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
276         doc.add(new Field("tvpositionoffset","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
277
278         writer.addDocument(doc);
279     }
280     writer.close();
281     IndexReader reader = IndexReader.open(d, false);
282     FieldSortedTermVectorMapper mapper = new FieldSortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
283     reader.getTermFreqVector(0, mapper);
284     Map<String,SortedSet<TermVectorEntry>> map = mapper.getFieldToTerms();
285     assertTrue("map is null and it shouldn't be", map != null);
286     assertTrue("map Size: " + map.size() + " is not: " + 4, map.size() == 4);
287     Set<TermVectorEntry> set = map.get("termvector");
288     for (Iterator<TermVectorEntry> iterator = set.iterator(); iterator.hasNext();) {
289       TermVectorEntry entry =  iterator.next();
290       assertTrue("entry is null and it shouldn't be", entry != null);
291       if (VERBOSE) System.out.println("Entry: " + entry);
292     }
293     reader.close();
294     d.close();
295   }
296
297   static void assertTermDocsCount(String msg,
298                                      IndexReader reader,
299                                      Term term,
300                                      int expected)
301     throws IOException
302     {
303         TermDocs tdocs = null;
304
305         try {
306             tdocs = reader.termDocs(term);
307             assertNotNull(msg + ", null TermDocs", tdocs);
308             int count = 0;
309             while(tdocs.next()) {
310                 count++;
311             }
312             assertEquals(msg + ", count mismatch", expected, count);
313
314         } finally {
315             if (tdocs != null)
316                 tdocs.close();
317         }
318
319     }
320
321     
    /**
     * Tests storing and retrieving a binary field value, both eagerly and
     * via a lazy FieldSelector, and that the stored bytes survive a
     * forceMerge(1).
     */
    public void testBinaryFields() throws IOException {
        Directory dir = newDirectory();
        byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
        
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
        
        // fill the index with a mix of documents so the binary doc is not alone
        for (int i = 0; i < 10; i++) {
          addDoc(writer, "document number " + (i + 1));
          addDocumentWithFields(writer);
          addDocumentWithDifferentFields(writer);
          addDocumentWithTermVectorFields(writer);
        }
        writer.close();
        // append one document carrying the binary field "bin1"
        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
        Document doc = new Document();
        doc.add(new Field("bin1", bin));
        doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);
        writer.close();
        IndexReader reader = IndexReader.open(dir, false);
        // the binary doc was added last, so it sits at maxDoc()-1
        doc = reader.document(reader.maxDoc() - 1);
        Field[] fields = doc.getFields("bin1");
        assertNotNull(fields);
        assertEquals(1, fields.length);
        Field b1 = fields[0];
        assertTrue(b1.isBinary());
        byte[] data1 = b1.getBinaryValue();
        assertEquals(bin.length, b1.getBinaryLength());
        // compare byte-by-byte, honoring the field's binary offset
        for (int i = 0; i < bin.length; i++) {
          assertEquals(bin[i], data1[i + b1.getBinaryOffset()]);
        }
        // now load the same field lazily through a SetBasedFieldSelector
        Set<String> lazyFields = new HashSet<String>();
        lazyFields.add("bin1");
        FieldSelector sel = new SetBasedFieldSelector(new HashSet<String>(), lazyFields);
        doc = reader.document(reader.maxDoc() - 1, sel);
        Fieldable[] fieldables = doc.getFieldables("bin1");
        assertNotNull(fieldables);
        assertEquals(1, fieldables.length);
        Fieldable fb1 = fieldables[0];
        assertTrue(fb1.isBinary());
        assertEquals(bin.length, fb1.getBinaryLength());
        data1 = fb1.getBinaryValue();
        assertEquals(bin.length, fb1.getBinaryLength());
        for (int i = 0; i < bin.length; i++) {
          assertEquals(bin[i], data1[i + fb1.getBinaryOffset()]);
        }
        reader.close();
        // force merge


        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
        writer.forceMerge(1);
        writer.close();
        // the binary value must be unchanged after merging into one segment
        reader = IndexReader.open(dir, false);
        doc = reader.document(reader.maxDoc() - 1);
        fields = doc.getFields("bin1");
        assertNotNull(fields);
        assertEquals(1, fields.length);
        b1 = fields[0];
        assertTrue(b1.isBinary());
        data1 = b1.getBinaryValue();
        assertEquals(bin.length, b1.getBinaryLength());
        for (int i = 0; i < bin.length; i++) {
          assertEquals(bin[i], data1[i + b1.getBinaryOffset()]);
        }
        reader.close();
        dir.close();
    }
390
391     // Make sure attempts to make changes after reader is
392     // closed throws IOException:
393     public void testChangesAfterClose() throws IOException {
394         Directory dir = newDirectory();
395
396         IndexWriter writer = null;
397         IndexReader reader = null;
398         Term searchTerm = new Term("content", "aaa");
399
400         //  add 11 documents with term : aaa
401         writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
402         for (int i = 0; i < 11; i++) {
403             addDoc(writer, searchTerm.text());
404         }
405         writer.close();
406
407         reader = IndexReader.open(dir, false);
408
409         // Close reader:
410         reader.close();
411
412         // Then, try to make changes:
413         try {
414           reader.deleteDocument(4);
415           fail("deleteDocument after close failed to throw IOException");
416         } catch (AlreadyClosedException e) {
417           // expected
418         }
419
420         try {
421           reader.setNorm(5, "aaa", 2.0f);
422           fail("setNorm after close failed to throw IOException");
423         } catch (AlreadyClosedException e) {
424           // expected
425         }
426
427         try {
428           reader.undeleteAll();
429           fail("undeleteAll after close failed to throw IOException");
430         } catch (AlreadyClosedException e) {
431           // expected
432         }
433         dir.close();
434     }
435
    // Make sure we get lock obtain failed exception with 2 writers:
    // while an IndexWriter holds the write lock, a reader's mutating calls
    // (delete/setNorm/undeleteAll) must fail with LockObtainFailedException.
    public void testLockObtainFailed() throws IOException {
        Directory dir = newDirectory();

        Term searchTerm = new Term("content", "aaa");

        //  add 11 documents with term : aaa
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
        writer.commit();
        for (int i = 0; i < 11; i++) {
            addDoc(writer, searchTerm.text());
        }

        // Create reader; the writer above is still open and holds the lock
        IndexReader reader = IndexReader.open(dir, false);

        // Try to make changes
        try {
          reader.deleteDocument(4);
          fail("deleteDocument should have hit LockObtainFailedException");
        } catch (LockObtainFailedException e) {
          // expected
        }

        try {
          reader.setNorm(5, "aaa", 2.0f);
          fail("setNorm should have hit LockObtainFailedException");
        } catch (LockObtainFailedException e) {
          // expected
        }

        try {
          reader.undeleteAll();
          fail("undeleteAll should have hit LockObtainFailedException");
        } catch (LockObtainFailedException e) {
          // expected
        }
        writer.close();
        reader.close();
        dir.close();
    }
477
478     // Make sure you can set norms & commit even if a reader
479     // is open against the index:
480     public void testWritingNorms() throws IOException {
481         Directory dir = newDirectory();
482         IndexWriter writer;
483         IndexReader reader;
484         Term searchTerm = new Term("content", "aaa");
485
486         //  add 1 documents with term : aaa
487         writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
488         addDoc(writer, searchTerm.text());
489         writer.close();
490
491         //  now open reader & set norm for doc 0
492         reader = IndexReader.open(dir, false);
493         reader.setNorm(0, "content", (float) 2.0);
494
495         // we should be holding the write lock now:
496         assertTrue("locked", IndexWriter.isLocked(dir));
497
498         reader.commit();
499
500         // we should not be holding the write lock now:
501         assertTrue("not locked", !IndexWriter.isLocked(dir));
502
503         // open a 2nd reader:
504         IndexReader reader2 = IndexReader.open(dir, false);
505
506         // set norm again for doc 0
507         reader.setNorm(0, "content", (float) 3.0);
508         assertTrue("locked", IndexWriter.isLocked(dir));
509
510         reader.close();
511
512         // we should not be holding the write lock now:
513         assertTrue("not locked", !IndexWriter.isLocked(dir));
514
515         reader2.close();
516         dir.close();
517     }
518
519
    // Make sure you can set norms & commit, and there are
    // no extra norms files left: each new norms generation must remove the
    // previous generation's file.
    public void testWritingNormsNoReader() throws IOException {
        Directory dir = newDirectory();
        IndexWriter writer = null;
        IndexReader reader = null;
        Term searchTerm = new Term("content", "aaa");

        //  add 1 documents with term : aaa
        // (non-compound file format so the separate norms file is visible)
        writer  = new IndexWriter(
            dir,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
                setMergePolicy(newLogMergePolicy(false))
        );
        addDoc(writer, searchTerm.text());
        writer.close();

        //  now open reader & set norm for doc 0 (writes to
        //  _0_1.s0)
        reader = IndexReader.open(dir, false);
        reader.setNorm(0, "content", (float) 2.0);
        reader.close();
        
        //  now open reader again & set norm for doc 0 (writes to _0_2.s0)
        reader = IndexReader.open(dir, false);
        reader.setNorm(0, "content", (float) 2.0);
        reader.close();
        // the first-generation norms file must be gone
        assertFalse("failed to remove first generation norms file on writing second generation",
                    dir.fileExists("_0_1.s0"));
        
        dir.close();
    }
552
553     /* ??? public void testOpenEmptyDirectory() throws IOException{
554       String dirName = "test.empty";
555       File fileDirName = new File(dirName);
556       if (!fileDirName.exists()) {
557         fileDirName.mkdir();
558       }
559       try {
560         IndexReader.open(fileDirName);
561         fail("opening IndexReader on empty directory failed to produce FileNotFoundException");
562       } catch (FileNotFoundException e) {
563         // GOOD
564       }
565       rmDir(fileDirName);
566     }*/
567     
  /**
   * Verifies that both IndexWriter and IndexReader close every file they
   * open: on platforms where open files cannot be deleted, rmDir would fail
   * if any handle were still held.
   */
  public void testFilesOpenClose() throws IOException {
        // Create initial data set
        File dirFile = _TestUtil.getTempDir("TestIndexReader.testFilesOpenClose");
        Directory dir = newFSDirectory(dirFile);
        IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
        addDoc(writer, "test");
        writer.close();
        dir.close();

        // Try to erase the data - this ensures that the writer closed all files
        _TestUtil.rmDir(dirFile);
        dir = newFSDirectory(dirFile);

        // Now create the data set again, just as before
        writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
        addDoc(writer, "test");
        writer.close();
        dir.close();

        // Now open existing directory and test that reader closes all files
        dir = newFSDirectory(dirFile);
        IndexReader reader1 = IndexReader.open(dir, false);
        reader1.close();
        dir.close();

        // The following will fail if reader did not close
        // all files
        _TestUtil.rmDir(dirFile);
    }
597
    /**
     * IndexReader.lastModified must be stable across repeated calls and must
     * not move backwards when the index is re-created.  Runs two iterations
     * to also exercise the repeated-call case (i == 1).
     */
    public void testLastModified() throws Exception {
      for(int i=0;i<2;i++) {
        final Directory dir = newDirectory();
        assertFalse(IndexReader.indexExists(dir));
        IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
        addDocumentWithFields(writer);
        assertTrue(IndexWriter.isLocked(dir));          // writer open, so dir is locked
        writer.close();
        assertTrue(IndexReader.indexExists(dir));
        IndexReader reader = IndexReader.open(dir, false);
        assertFalse(IndexWriter.isLocked(dir));         // reader only, no lock
        // NOTE: despite the name, "version" holds the lastModified timestamp
        long version = IndexReader.lastModified(dir);
        if (i == 1) {
          // repeated calls must return the same value
          long version2 = IndexReader.lastModified(dir);
          assertEquals(version, version2);
        }
        reader.close();
        // modify index and check lastModified has not moved backwards;
        // sleep so the new commit gets a later timestamp (presumably to
        // cover coarse filesystem timestamp resolution — TODO confirm)
        Thread.sleep(1000);

        writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
        addDocumentWithFields(writer);
        writer.close();
        reader = IndexReader.open(dir, false);
        assertTrue("old lastModified is " + version + "; new lastModified is " + IndexReader.lastModified(dir), version <= IndexReader.lastModified(dir));
        reader.close();
        dir.close();
      }
    }
628
629     public void testVersion() throws IOException {
630       Directory dir = newDirectory();
631       assertFalse(IndexReader.indexExists(dir));
632       IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
633       addDocumentWithFields(writer);
634       assertTrue(IndexWriter.isLocked(dir));            // writer open, so dir is locked
635       writer.close();
636       assertTrue(IndexReader.indexExists(dir));
637       IndexReader reader = IndexReader.open(dir, false);
638       assertFalse(IndexWriter.isLocked(dir));           // reader only, no lock
639       long version = IndexReader.getCurrentVersion(dir);
640       reader.close();
641       // modify index and check version has been
642       // incremented:
643       writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
644       addDocumentWithFields(writer);
645       writer.close();
646       reader = IndexReader.open(dir, false);
647       assertTrue("old version is " + version + "; new version is " + IndexReader.getCurrentVersion(dir), version < IndexReader.getCurrentVersion(dir));
648       reader.close();
649       dir.close();
650     }
651
    /**
     * While a second IndexWriter is open (holding the write lock), a
     * reader's deleteDocument must fail; after the lock is released —
     * either by forced unlock or by closing the writer — the delete
     * succeeds.
     */
    public void testLock() throws IOException {
      Directory dir = newDirectory();
      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      addDocumentWithFields(writer);
      writer.close();
      // second writer re-acquires the write lock on the same directory
      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
      IndexReader reader = IndexReader.open(dir, false);
      try {
        reader.deleteDocument(0);
        fail("expected lock");
      } catch(IOException e) {
        // expected exception
      }
      try {
        IndexWriter.unlock(dir);                // this should not be done in the real world! 
      } catch (LockReleaseFailedException lrfe) {
        // some lock factories cannot force-release; close the writer instead
        writer.close();
      }
      // with the lock released the reader's delete now succeeds
      reader.deleteDocument(0);
      reader.close();
      writer.close();
      dir.close();
    }
675
    /**
     * Regression test for LUCENE-140: an out-of-bounds deleteDocument that
     * lands within the final bits of the deletions BitVector must throw
     * (not silently corrupt the index, which would later surface as a
     * "docs out of order" failure during merging).
     */
    public void testDocsOutOfOrderJIRA140() throws IOException {
      Directory dir = newDirectory();      
      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      for(int i=0;i<11;i++) {
        addDoc(writer, "aaa");
      }
      writer.close();
      IndexReader reader = IndexReader.open(dir, false);

      // Try to delete an invalid docId, yet, within range
      // of the final bits of the BitVector:

      boolean gotException = false;
      try {
        reader.deleteDocument(11);
      } catch (ArrayIndexOutOfBoundsException e) {
        gotException = true;
      }
      reader.close();

      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));

      // We must add more docs to get a new segment written
      for(int i=0;i<11;i++) {
        addDoc(writer, "aaa");
      }

      // Without the fix for LUCENE-140 this call will
      // [incorrectly] hit a "docs out of order"
      // IllegalStateException because above out-of-bounds
      // deleteDocument corrupted the index:
      writer.forceMerge(1);
      writer.close();
      if (!gotException) {
        fail("delete of out-of-bounds doc number failed to hit exception");
      }
      dir.close();
    }
714
    /**
     * Regression test for LUCENE-768: when deleteDocument or setNorm throws
     * for an invalid doc number, the reader must still release the write
     * lock on close.
     */
    public void testExceptionReleaseWriteLockJIRA768() throws IOException {

      Directory dir = newDirectory();      
      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      addDoc(writer, "aaa");
      writer.close();

      // deleting doc 1 is out of bounds (only doc 0 exists)
      IndexReader reader = IndexReader.open(dir, false);
      try {
        reader.deleteDocument(1);
        fail("did not hit exception when deleting an invalid doc number");
      } catch (ArrayIndexOutOfBoundsException e) {
        // expected
      }
      reader.close();
      if (IndexWriter.isLocked(dir)) {
        fail("write lock is still held after close");
      }

      reader = IndexReader.open(dir, false);
      try {
        reader.setNorm(1, "content", (float) 2.0);
        fail("did not hit exception when calling setNorm on an invalid doc number");
      } catch (ArrayIndexOutOfBoundsException e) {
        // expected
      }
      reader.close();
      if (IndexWriter.isLocked(dir)) {
        fail("write lock is still held after close");
      }
      dir.close();
    }
747
748     private String arrayToString(String[] l) {
749       String s = "";
750       for(int i=0;i<l.length;i++) {
751         if (i > 0) {
752           s += "\n    ";
753         }
754         s += l[i];
755       }
756       return s;
757     }
758
    /**
     * Opening an IndexReader on a directory that contains no index must
     * throw FileNotFoundException (never NPE), both while the directory
     * exists on disk and after it has been deleted.
     */
    public void testOpenReaderAfterDelete() throws IOException {
      File dirFile = _TestUtil.getTempDir("deletetest");
      Directory dir = newFSDirectory(dirFile);
      // Directory exists but holds no index yet:
      try {
        IndexReader.open(dir, false);
        fail("expected FileNotFoundException");
      } catch (FileNotFoundException e) {
        // expected
      }

      dirFile.delete();

      // Make sure we still get a FileNotFoundException (not NPE) once the
      // underlying directory itself is gone:
      try {
        IndexReader.open(dir, false);
        fail("expected FileNotFoundException");
      } catch (FileNotFoundException e) {
        // expected
      }
      
      dir.close();
    }
781
782     static void addDocumentWithFields(IndexWriter writer) throws IOException
783     {
784         Document doc = new Document();
785         doc.add(newField("keyword","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
786         doc.add(newField("text","test1", Field.Store.YES, Field.Index.ANALYZED));
787         doc.add(newField("unindexed","test1", Field.Store.YES, Field.Index.NO));
788         doc.add(newField("unstored","test1", Field.Store.NO, Field.Index.ANALYZED));
789         writer.addDocument(doc);
790     }
791
792     static void addDocumentWithDifferentFields(IndexWriter writer) throws IOException
793     {
794         Document doc = new Document();
795         doc.add(newField("keyword2","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
796         doc.add(newField("text2","test1", Field.Store.YES, Field.Index.ANALYZED));
797         doc.add(newField("unindexed2","test1", Field.Store.YES, Field.Index.NO));
798         doc.add(newField("unstored2","test1", Field.Store.NO, Field.Index.ANALYZED));
799         writer.addDocument(doc);
800     }
801
802     static void addDocumentWithTermVectorFields(IndexWriter writer) throws IOException
803     {
804         Document doc = new Document();
805         doc.add(newField("tvnot","tvnot", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
806         doc.add(newField("termvector","termvector", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
807         doc.add(newField("tvoffset","tvoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
808         doc.add(newField("tvposition","tvposition", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
809         doc.add(newField("tvpositionoffset","tvpositionoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
810         
811         writer.addDocument(doc);
812     }
813     
814     static void addDoc(IndexWriter writer, String value) throws IOException {
815         Document doc = new Document();
816         doc.add(newField("content", value, Field.Store.NO, Field.Index.ANALYZED));
817         writer.addDocument(doc);
818     }
819
820     public static void assertIndexEquals(IndexReader index1, IndexReader index2) throws IOException {
821       assertEquals("IndexReaders have different values for numDocs.", index1.numDocs(), index2.numDocs());
822       assertEquals("IndexReaders have different values for maxDoc.", index1.maxDoc(), index2.maxDoc());
823       assertEquals("Only one IndexReader has deletions.", index1.hasDeletions(), index2.hasDeletions());
824       if (!(index1 instanceof ParallelReader)) {
825         assertEquals("Single segment test differs.", index1.getSequentialSubReaders().length == 1, index2.getSequentialSubReaders().length == 1);
826       }
827       
828       // check field names
829       Collection<String> fields1 = index1.getFieldNames(FieldOption.ALL);
830       Collection<String> fields2 = index1.getFieldNames(FieldOption.ALL);
831       assertEquals("IndexReaders have different numbers of fields.", fields1.size(), fields2.size());
832       Iterator<String> it1 = fields1.iterator();
833       Iterator<String> it2 = fields1.iterator();
834       while (it1.hasNext()) {
835         assertEquals("Different field names.", it1.next(), it2.next());
836       }
837       
838       // check norms
839       it1 = fields1.iterator();
840       while (it1.hasNext()) {
841         String curField = it1.next();
842         byte[] norms1 = index1.norms(curField);
843         byte[] norms2 = index2.norms(curField);
844         if (norms1 != null && norms2 != null)
845         {
846           assertEquals(norms1.length, norms2.length);
847                 for (int i = 0; i < norms1.length; i++) {
848                   assertEquals("Norm different for doc " + i + " and field '" + curField + "'.", norms1[i], norms2[i]);
849                 }
850         }
851         else
852         {
853           assertSame(norms1, norms2);
854         }
855       }
856       
857       // check deletions
858       for (int i = 0; i < index1.maxDoc(); i++) {
859         assertEquals("Doc " + i + " only deleted in one index.", index1.isDeleted(i), index2.isDeleted(i));
860       }
861       
862       // check stored fields
863       for (int i = 0; i < index1.maxDoc(); i++) {
864         if (!index1.isDeleted(i)) {
865           Document doc1 = index1.document(i);
866           Document doc2 = index2.document(i);
867           List<Fieldable> fieldable1 = doc1.getFields();
868           List<Fieldable> fieldable2 = doc2.getFields();
869           assertEquals("Different numbers of fields for doc " + i + ".", fieldable1.size(), fieldable2.size());
870           Iterator<Fieldable> itField1 = fieldable1.iterator();
871           Iterator<Fieldable> itField2 = fieldable2.iterator();
872           while (itField1.hasNext()) {
873             Field curField1 = (Field) itField1.next();
874             Field curField2 = (Field) itField2.next();
875             assertEquals("Different fields names for doc " + i + ".", curField1.name(), curField2.name());
876             assertEquals("Different field values for doc " + i + ".", curField1.stringValue(), curField2.stringValue());
877           }          
878         }
879       }
880       
881       // check dictionary and posting lists
882       TermEnum enum1 = index1.terms();
883       TermEnum enum2 = index2.terms();
884       TermPositions tp1 = index1.termPositions();
885       TermPositions tp2 = index2.termPositions();
886       while(enum1.next()) {
887         assertTrue(enum2.next());
888         assertEquals("Different term in dictionary.", enum1.term(), enum2.term());
889         tp1.seek(enum1.term());
890         tp2.seek(enum1.term());
891         while(tp1.next()) {
892           assertTrue(tp2.next());
893           assertEquals("Different doc id in postinglist of term " + enum1.term() + ".", tp1.doc(), tp2.doc());
894           assertEquals("Different term frequence in postinglist of term " + enum1.term() + ".", tp1.freq(), tp2.freq());
895           for (int i = 0; i < tp1.freq(); i++) {
896             assertEquals("Different positions in postinglist of term " + enum1.term() + ".", tp1.nextPosition(), tp2.nextPosition());
897           }
898         }
899       }
900     }
901
    /**
     * Verifies IndexReader.getIndexCommit(): the commit must match the
     * current segments file, be equal across calls on the same reader, and
     * change (with the right segment count) after the index changes.
     */
    public void testGetIndexCommit() throws IOException {

      Directory d = newDirectory();

      // set up writer
      IndexWriter writer = new IndexWriter(
          d,
          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
              setMaxBufferedDocs(2).
              setMergePolicy(newLogMergePolicy(10))
      );
      for(int i=0;i<27;i++)
        addDocumentWithFields(writer);
      writer.close();

      // The commit exposed by the reader must name the same segments file
      // that SegmentInfos reads from the directory:
      SegmentInfos sis = new SegmentInfos();
      sis.read(d);
      IndexReader r = IndexReader.open(d, false);
      IndexCommit c = r.getIndexCommit();

      assertEquals(sis.getCurrentSegmentFileName(), c.getSegmentsFileName());

      // getIndexCommit must be stable across calls on the same reader:
      assertTrue(c.equals(r.getIndexCommit()));

      // Change the index
      writer = new IndexWriter(
          d,
          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
              setOpenMode(OpenMode.APPEND).
              setMaxBufferedDocs(2).
              setMergePolicy(newLogMergePolicy(10))
      );
      for(int i=0;i<7;i++)
        addDocumentWithFields(writer);
      writer.close();

      // A reopened reader must see a different, multi-segment commit:
      IndexReader r2 = IndexReader.openIfChanged(r);
      assertNotNull(r2);
      assertFalse(c.equals(r2.getIndexCommit()));
      assertFalse(r2.getIndexCommit().getSegmentCount() == 1);
      r2.close();

      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
        new MockAnalyzer(random))
        .setOpenMode(OpenMode.APPEND));
      writer.forceMerge(1);
      writer.close();

      // After forceMerge(1) the commit must report exactly one segment:
      r2 = IndexReader.openIfChanged(r);
      assertNotNull(r2);
      assertNull(IndexReader.openIfChanged(r2));
      assertEquals(1, r2.getIndexCommit().getSegmentCount());

      r.close();
      r2.close();
      d.close();
    }      
959
    /**
     * Readonly readers -- and readers reopened from them, before and after a
     * forceMerge -- must reject deleteDocument with
     * UnsupportedOperationException, and must not leave the write lock held.
     */
    public void testReadOnly() throws Throwable {
      Directory d = newDirectory();
      IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      addDocumentWithFields(writer);
      writer.commit();
      addDocumentWithFields(writer);
      writer.close();

      // readOnly=true reader: any delete attempt must be rejected
      IndexReader r = IndexReader.open(d, true);
      try {
        r.deleteDocument(0);
        fail();
      } catch (UnsupportedOperationException uoe) {
        // expected
      }

      writer = new IndexWriter(
          d,
          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
              setOpenMode(OpenMode.APPEND).
              setMergePolicy(newLogMergePolicy(10))
      );
      addDocumentWithFields(writer);
      writer.close();

      // Make sure reopen is still readonly:
      IndexReader r2 = IndexReader.openIfChanged(r);
      assertNotNull(r2);
      r.close();

      assertFalse(r == r2);

      try {
        r2.deleteDocument(0);
        fail();
      } catch (UnsupportedOperationException uoe) {
        // expected
      }

      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
        new MockAnalyzer(random))
        .setOpenMode(OpenMode.APPEND));
      writer.forceMerge(1);
      writer.close();

      // Make sure reopen to a single segment is still readonly:
      IndexReader r3 = IndexReader.openIfChanged(r2);
      assertNotNull(r3);
      assertFalse(r3 == r2);
      r2.close();
      
      // NOTE(review): duplicates the r != r2 check above; r3 != r2 was
      // already asserted -- looks like a copy/paste leftover, confirm intent.
      assertFalse(r == r2);

      try {
        r3.deleteDocument(0);
        fail();
      } catch (UnsupportedOperationException uoe) {
        // expected
      }

      // Make sure write lock isn't held
      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
          new MockAnalyzer(random))
      .setOpenMode(OpenMode.APPEND));
      writer.close();

      r3.close();
      d.close();
    }
1030
1031
  // LUCENE-1474
  /**
   * Deleting by Term through an IndexReader, with an intermediate flush()
   * between two deletes, must leave an index that a fresh readonly reader
   * can still open without error.
   */
  public void testIndexReader() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("b"));
    writer.addDocument(createDocument("c"));
    writer.close();
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocuments(new Term("id", "a"));
    // flush the first delete before issuing the second:
    reader.flush();
    reader.deleteDocuments(new Term("id", "b"));
    reader.close();
    // the index must still be openable after the deletes:
    IndexReader.open(dir,true).close();
    dir.close();
  }
1049
1050   static Document createDocument(String id) {
1051     Document doc = new Document();
1052     doc.add(newField("id", id, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
1053     return doc;
1054   }
1055
1056   // LUCENE-1468 -- make sure on attempting to open an
1057   // IndexReader on a non-existent directory, you get a
1058   // good exception
1059   public void testNoDir() throws Throwable {
1060     Directory dir = newFSDirectory(_TestUtil.getTempDir("doesnotexist"));
1061     try {
1062       IndexReader.open(dir, true);
1063       fail("did not hit expected exception");
1064     } catch (NoSuchDirectoryException nsde) {
1065       // expected
1066     }
1067     dir.close();
1068   }
1069
1070   // LUCENE-1509
1071   public void testNoDupCommitFileNames() throws Throwable {
1072
1073     Directory dir = newDirectory();
1074     
1075     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
1076         TEST_VERSION_CURRENT, new MockAnalyzer(random))
1077         .setMaxBufferedDocs(2));
1078     writer.addDocument(createDocument("a"));
1079     writer.addDocument(createDocument("a"));
1080     writer.addDocument(createDocument("a"));
1081     writer.close();
1082     
1083     Collection<IndexCommit> commits = IndexReader.listCommits(dir);
1084     for (final IndexCommit commit : commits) {
1085       Collection<String> files = commit.getFileNames();
1086       HashSet<String> seen = new HashSet<String>();
1087       for (final String fileName : files) { 
1088         assertTrue("file " + fileName + " was duplicated", !seen.contains(fileName));
1089         seen.add(fileName);
1090       }
1091     }
1092
1093     dir.close();
1094   }
1095
  // LUCENE-1579: Ensure that on a cloned reader, segments
  // reuse the doc values arrays in FieldCache
  public void testFieldCacheReuseAfterClone() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    Document doc = new Document();
    doc.add(newField("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
    writer.addDocument(doc);
    writer.close();

    // Open reader and populate the FieldCache entry for "number":
    IndexReader r = SegmentReader.getOnlySegmentReader(dir);
    final int[] ints = FieldCache.DEFAULT.getInts(r, "number");
    assertEquals(1, ints.length);
    assertEquals(17, ints[0]);

    // Clone reader; the clone must be a distinct object but still hand back
    // the SAME cached int[] (reference equality checked below):
    IndexReader r2 = (IndexReader) r.clone();
    r.close();
    assertTrue(r2 != r);
    final int[] ints2 = FieldCache.DEFAULT.getInts(r2, "number");
    r2.close();

    assertEquals(1, ints2.length);
    assertEquals(17, ints2[0]);
    assertTrue(ints == ints2);

    dir.close();
  }
1125
  // LUCENE-1579: Ensure that on a reopened reader, that any
  // shared segments reuse the doc values arrays in
  // FieldCache
  public void testFieldCacheReuseAfterReopen() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(10))
    );
    Document doc = new Document();
    doc.add(newField("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
    // NOTE(review): appears redundant -- the config above already installs
    // newLogMergePolicy(10); confirm and remove if so.
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
    writer.addDocument(doc);
    writer.commit();

    // Open reader1 and populate the FieldCache entry for its only segment:
    IndexReader r = IndexReader.open(dir, false);
    IndexReader r1 = SegmentReader.getOnlySegmentReader(r);
    final int[] ints = FieldCache.DEFAULT.getInts(r1, "number");
    assertEquals(1, ints.length);
    assertEquals(17, ints[0]);

    // Add new segment
    writer.addDocument(doc);
    writer.commit();

    // Reopen reader1 --> reader2
    IndexReader r2 = IndexReader.openIfChanged(r);
    assertNotNull(r2);
    r.close();
    IndexReader sub0 = r2.getSequentialSubReaders()[0];
    final int[] ints2 = FieldCache.DEFAULT.getInts(sub0, "number");
    r2.close();
    // the unchanged first segment must hand back the SAME cached array:
    assertTrue(ints == ints2);

    writer.close();
    dir.close();
  }
1165
  // LUCENE-1586: getUniqueTermCount
  public void testUniqueTermCount() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    Document doc = new Document();
    // 26 letters + 10 digits = 36 unique terms per segment:
    doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
    doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
    writer.addDocument(doc);
    writer.addDocument(doc);
    writer.commit();

    // Single-segment reader supports getUniqueTermCount:
    IndexReader r = IndexReader.open(dir, false);
    IndexReader r1 = SegmentReader.getOnlySegmentReader(r);
    assertEquals(36, r1.getUniqueTermCount());
    writer.addDocument(doc);
    writer.commit();
    IndexReader r2 = IndexReader.openIfChanged(r);
    assertNotNull(r2);
    r.close();
    // Multi-segment (composite) reader must refuse the call...
    try {
      r2.getUniqueTermCount();
      fail("expected exception");
    } catch (UnsupportedOperationException uoe) {
      // expected
    }
    // ...but each sub-reader must still answer per segment:
    IndexReader[] subs = r2.getSequentialSubReaders();
    for(int i=0;i<subs.length;i++) {
      assertEquals(36, subs[i].getUniqueTermCount());
    }
    r2.close();
    writer.close();
    dir.close();
  }
1199
  // LUCENE-1609: don't load terms index
  public void testNoTermsIndex() throws Throwable {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    Document doc = new Document();
    doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
    doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
    writer.addDocument(doc);
    writer.addDocument(doc);
    writer.close();

    // termInfosIndexDivisor = -1 opens the reader without a terms index;
    // term-based lookups like docFreq must then fail:
    IndexReader r = IndexReader.open(dir, null, true, -1);
    try {
      r.docFreq(new Term("field", "f"));
      fail("did not hit expected exception");
    } catch (IllegalStateException ise) {
      // expected
    }
    assertFalse(((SegmentReader) r.getSequentialSubReaders()[0]).termsIndexLoaded());

    assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
    writer = new IndexWriter(
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(10))
    );
    writer.addDocument(doc);
    writer.close();

    // LUCENE-1718: ensure re-open carries over no terms index:
    IndexReader r2 = IndexReader.openIfChanged(r);
    assertNotNull(r2);
    assertNull(IndexReader.openIfChanged(r2));
    r.close();
    IndexReader[] subReaders = r2.getSequentialSubReaders();
    assertEquals(2, subReaders.length);
    for(int i=0;i<2;i++) {
      assertFalse(((SegmentReader) subReaders[i]).termsIndexLoaded());
    }
    r2.close();
    dir.close();
  }
1242
  // LUCENE-2046
  /**
   * prepareCommit() alone must not make an open reader non-current (nor
   * give openIfChanged anything new); only the final commit() does.
   */
  public void testPrepareCommitIsCurrent() throws Throwable {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( 
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.commit();
    Document doc = new Document();
    writer.addDocument(doc);
    IndexReader r = IndexReader.open(dir, true);
    assertTrue(r.isCurrent());
    writer.addDocument(doc);
    writer.prepareCommit();
    // still current: the two-phase commit has not completed yet
    assertTrue(r.isCurrent());
    IndexReader r2 = IndexReader.openIfChanged(r);
    assertNull(r2);
    writer.commit();
    // now the commit is visible, so the reader is stale:
    assertFalse(r.isCurrent());
    writer.close();
    r.close();
    dir.close();
  }
1264   
  // LUCENE-2753
  /**
   * With a SnapshotDeletionPolicy keeping three snapshotted commits alive,
   * IndexReader.listCommits must return the commits in strictly increasing
   * generation order.
   */
  public void testListCommits() throws Exception {
    Directory dir = newDirectory();
    SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( 
        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(sdp));
    writer.addDocument(new Document());
    writer.commit();
    sdp.snapshot("c1");
    writer.addDocument(new Document());
    writer.commit();
    sdp.snapshot("c2");
    writer.addDocument(new Document());
    writer.commit();
    sdp.snapshot("c3");
    writer.close();
    // generations must be strictly increasing across the listed commits:
    long currentGen = 0;
    for (IndexCommit ic : IndexReader.listCommits(dir)) {
      assertTrue("currentGen=" + currentGen + " commitGen=" + ic.getGeneration(), currentGen < ic.getGeneration());
      currentGen = ic.getGeneration();
    }
    dir.close();
  }
1288
1289   // LUCENE-2812
1290   public void testIndexExists() throws Exception {
1291     Directory dir = newDirectory();
1292     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1293     writer.addDocument(new Document());
1294     writer.prepareCommit();
1295     assertFalse(IndexReader.indexExists(dir));
1296     writer.close();
1297     assertTrue(IndexReader.indexExists(dir));
1298     dir.close();
1299   }
1300
  // LUCENE-2474
  /**
   * ReaderFinishedListener callback counts: closing an NRT top reader fires
   * once; closing the writer then fires for the remaining sub-readers
   * (total 3 here); a directory reader fires for all 3 on a single close.
   */
  public void testReaderFinishedListener() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
    ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
    writer.setInfoStream(VERBOSE ? System.out : null);
    writer.addDocument(new Document());
    writer.commit();
    writer.addDocument(new Document());
    writer.commit();
    final IndexReader reader = writer.getReader();
    // shared counter incremented once per finished() callback
    final int[] closeCount = new int[1];
    final IndexReader.ReaderFinishedListener listener = new IndexReader.ReaderFinishedListener() {
      public void finished(IndexReader reader) {
        closeCount[0]++;
      }
    };

    reader.addReaderFinishedListener(listener);

    reader.close();

    // Just the top reader
    assertEquals(1, closeCount[0]);
    writer.close();

    // Now also the subs
    assertEquals(3, closeCount[0]);

    IndexReader reader2 = IndexReader.open(dir);
    reader2.addReaderFinishedListener(listener);

    closeCount[0] = 0;
    reader2.close();
    assertEquals(3, closeCount[0]);
    dir.close();
  }
1338
1339   public void testOOBDocID() throws Exception {
1340     Directory dir = newDirectory();
1341     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1342     writer.addDocument(new Document());
1343     IndexReader r = writer.getReader();
1344     writer.close();
1345     r.document(0);
1346     try {
1347       r.document(1);
1348       fail("did not hit exception");
1349     } catch (IllegalArgumentException iae) {
1350       // expected
1351     }
1352     r.close();
1353     dir.close();
1354   }
1355   
1356   public void testTryIncRef() throws CorruptIndexException, LockObtainFailedException, IOException {
1357     Directory dir = newDirectory();
1358     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1359     writer.addDocument(new Document());
1360     writer.commit();
1361     IndexReader r = IndexReader.open(dir);
1362     assertTrue(r.tryIncRef());
1363     r.decRef();
1364     r.close();
1365     assertFalse(r.tryIncRef());
1366     writer.close();
1367     dir.close();
1368   }
1369   
  /**
   * Stress variant of testTryIncRef: several IncThreads hammer
   * tryIncRef/decRef on a shared reader while the main thread closes it;
   * no thread may fail, and tryIncRef must fail after the close.
   */
  public void testStressTryIncRef() throws CorruptIndexException, LockObtainFailedException, IOException, InterruptedException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.addDocument(new Document());
    writer.commit();
    IndexReader r = IndexReader.open(dir);
    int numThreads = atLeast(2);
    
    IncThread[] threads = new IncThread[numThreads];
    for (int i = 0; i < threads.length; i++) {
      threads[i] = new IncThread(r, random);
      threads[i].start();
    }
    // let the threads spin against the open reader for a moment
    Thread.sleep(100);

    assertTrue(r.tryIncRef());
    r.decRef();
    r.close();

    // each thread must exit cleanly once the reader is closed:
    for (int i = 0; i < threads.length; i++) {
      threads[i].join();
      assertNull(threads[i].failed);
    }
    assertFalse(r.tryIncRef());
    writer.close();
    dir.close();
  }
1397   
  /**
   * Worker for testStressTryIncRef: spins on tryIncRef/decRef against a
   * shared reader until the reader is closed.  Any assertion failure or
   * exception is captured in {@link #failed} for the test thread to check
   * after join().
   */
  static class IncThread extends Thread {
    final IndexReader toInc;
    final Random random;  // NOTE(review): stored but never read in run() -- confirm whether still needed
    Throwable failed;     // non-null if this thread hit an assertion failure or exception
    
    IncThread(IndexReader toInc, Random random) {
      this.toInc = toInc;
      this.random = random;
    }
    
    @Override
    public void run() {
      try {
        // Each successful tryIncRef is balanced by decRef; the loop ends
        // when tryIncRef fails, i.e. the main thread closed the reader.
        while (toInc.tryIncRef()) {
          assertFalse(toInc.hasDeletions());
          toInc.decRef();
        }
        // once closed, tryIncRef must keep failing:
        assertFalse(toInc.tryIncRef());
      } catch (Throwable e) {
        failed = e;
      }
    }
  }
1421 }