package org.apache.lucene.index;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.document.SetBasedFieldSelector;
import org.apache.lucene.index.IndexReader.FieldOption;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.LockReleaseFailedException;
import org.apache.lucene.store.NoSuchDirectoryException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;

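// Exercises the writable (pre-4.0) IndexReader API: deletes and norms via the
// reader, commits and commit user data, field-name enumeration, term vectors,
// write locking, and FieldCache reuse across clone/reopen. Most tests follow
// the same shape: build an index with IndexWriter, open a reader, assert,
// close, repeat.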
public class TestIndexReader extends LuceneTestCase {

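    // A minimal sketch of the round trip the next test exercises (the names
    // below are just the test's own; the read-back goes through the
    // IndexCommit rather than any static helper):
    //
    //   Map<String,String> data = new HashMap<String,String>();
    //   data.put("foo", "fighters");
    //   reader.flush(data);                      // commits pending deletes + user data
    //   reader.getIndexCommit().getUserData();   // reads the map back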
    public void testCommitUserData() throws Exception {
      Directory d = newDirectory();

      Map<String,String> commitUserData = new HashMap<String,String>();
      commitUserData.put("foo", "fighters");

      // set up writer
      IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
          TEST_VERSION_CURRENT, new MockAnalyzer(random))
      .setMaxBufferedDocs(2));
      for(int i=0;i<27;i++)
        addDocumentWithFields(writer);
      writer.close();

      IndexReader r = IndexReader.open(d, false);
      r.deleteDocument(5);
      r.flush(commitUserData);
      IndexCommit c = r.getIndexCommit();
      r.close();

      SegmentInfos sis = new SegmentInfos();
      sis.read(d);
      IndexReader r2 = IndexReader.open(d, false);
      assertEquals(commitUserData, c.getUserData());

      assertEquals(sis.getCurrentSegmentFileName(), c.getSegmentsFileName());

      // Change the index
      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
          new MockAnalyzer(random)).setOpenMode(
              OpenMode.APPEND).setMaxBufferedDocs(2));
      for(int i=0;i<7;i++)
        addDocumentWithFields(writer);
      writer.close();

      IndexReader r3 = r2.reopen();
      assertFalse(c.equals(r3.getIndexCommit()));
      r3.close();

      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
        new MockAnalyzer(random))
        .setOpenMode(OpenMode.APPEND));
      writer.optimize();
      writer.close();

      r3 = r2.reopen();
      r2.close();
      r3.close();
      d.close();
    }

    public void testIsCurrent() throws Exception {
      Directory d = newDirectory();
      IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      addDocumentWithFields(writer);
      writer.close();
      // set up reader:
      IndexReader reader = IndexReader.open(d, false);
      assertTrue(reader.isCurrent());
      // modify index by adding another document:
      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
          new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
      addDocumentWithFields(writer);
      writer.close();
      assertFalse(reader.isCurrent());
      // re-create index:
      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
          new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
      addDocumentWithFields(writer);
      writer.close();
      assertFalse(reader.isCurrent());
      reader.close();
      d.close();
    }

    /**
     * Tests the IndexReader.getFieldNames implementation
     * @throws Exception on error
     */
    public void testGetFieldNames() throws Exception {
        Directory d = newDirectory();
        // set up writer
        IndexWriter writer = new IndexWriter(
            d,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
        );

        Document doc = new Document();
        doc.add(new Field("keyword","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
        doc.add(new Field("text","test1", Field.Store.YES, Field.Index.ANALYZED));
        doc.add(new Field("unindexed","test1", Field.Store.YES, Field.Index.NO));
        doc.add(new Field("unstored","test1", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);

        writer.close();
        // set up reader
        IndexReader reader = IndexReader.open(d, false);
        Collection<String> fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
        assertTrue(fieldNames.contains("keyword"));
        assertTrue(fieldNames.contains("text"));
        assertTrue(fieldNames.contains("unindexed"));
        assertTrue(fieldNames.contains("unstored"));
        reader.close();
        // add more documents
        writer = new IndexWriter(
            d,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
                setOpenMode(OpenMode.APPEND).
                setMergePolicy(newLogMergePolicy())
        );
        // want to get some more segments here
        int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
        for (int i = 0; i < 5*mergeFactor; i++) {
          doc = new Document();
          doc.add(new Field("keyword","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
          doc.add(new Field("text","test1", Field.Store.YES, Field.Index.ANALYZED));
          doc.add(new Field("unindexed","test1", Field.Store.YES, Field.Index.NO));
          doc.add(new Field("unstored","test1", Field.Store.NO, Field.Index.ANALYZED));
          writer.addDocument(doc);
        }
        // new fields are in some different segments (we hope)
        for (int i = 0; i < 5*mergeFactor; i++) {
          doc = new Document();
          doc.add(new Field("keyword2","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
          doc.add(new Field("text2","test1", Field.Store.YES, Field.Index.ANALYZED));
          doc.add(new Field("unindexed2","test1", Field.Store.YES, Field.Index.NO));
          doc.add(new Field("unstored2","test1", Field.Store.NO, Field.Index.ANALYZED));
          writer.addDocument(doc);
        }
        // new termvector fields
        for (int i = 0; i < 5*mergeFactor; i++) {
          doc = new Document();
          doc.add(new Field("tvnot","tvnot", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
          doc.add(new Field("termvector","termvector", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
          doc.add(new Field("tvoffset","tvoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
          doc.add(new Field("tvposition","tvposition", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
          doc.add(newField("tvpositionoffset","tvpositionoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
          writer.addDocument(doc);
        }

        writer.close();
        // verify fields again
        reader = IndexReader.open(d, false);
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
        assertEquals(13, fieldNames.size());    // all 13 fields added above
        assertTrue(fieldNames.contains("keyword"));
        assertTrue(fieldNames.contains("text"));
        assertTrue(fieldNames.contains("unindexed"));
        assertTrue(fieldNames.contains("unstored"));
        assertTrue(fieldNames.contains("keyword2"));
        assertTrue(fieldNames.contains("text2"));
        assertTrue(fieldNames.contains("unindexed2"));
        assertTrue(fieldNames.contains("unstored2"));
        assertTrue(fieldNames.contains("tvnot"));
        assertTrue(fieldNames.contains("termvector"));
        assertTrue(fieldNames.contains("tvposition"));
        assertTrue(fieldNames.contains("tvoffset"));
        assertTrue(fieldNames.contains("tvpositionoffset"));

        // verify that only indexed fields were returned
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.INDEXED);
        assertEquals(11, fieldNames.size());    // 6 original + the 5 termvector fields
        assertTrue(fieldNames.contains("keyword"));
        assertTrue(fieldNames.contains("text"));
        assertTrue(fieldNames.contains("unstored"));
        assertTrue(fieldNames.contains("keyword2"));
        assertTrue(fieldNames.contains("text2"));
        assertTrue(fieldNames.contains("unstored2"));
        assertTrue(fieldNames.contains("tvnot"));
        assertTrue(fieldNames.contains("termvector"));
        assertTrue(fieldNames.contains("tvposition"));
        assertTrue(fieldNames.contains("tvoffset"));
        assertTrue(fieldNames.contains("tvpositionoffset"));

        // verify that only unindexed fields were returned
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.UNINDEXED);
        assertEquals(2, fieldNames.size());    // the two unindexed fields
        assertTrue(fieldNames.contains("unindexed"));
        assertTrue(fieldNames.contains("unindexed2"));

        // verify index term vector fields
        fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR);
        assertEquals(1, fieldNames.size());    // 1 field has term vector only
        assertTrue(fieldNames.contains("termvector"));

        fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION);
        assertEquals(1, fieldNames.size());    // only 1 field has term vectors with positions
        assertTrue(fieldNames.contains("tvposition"));

        fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET);
        assertEquals(1, fieldNames.size());    // only 1 field has term vectors with offsets
        assertTrue(fieldNames.contains("tvoffset"));

        fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET);
        assertEquals(1, fieldNames.size());    // only 1 field has term vectors with positions and offsets
        assertTrue(fieldNames.contains("tvpositionoffset"));
        reader.close();
        d.close();
    }

  public void testTermVectors() throws Exception {
    Directory d = newDirectory();
    // set up writer
    IndexWriter writer = new IndexWriter(
        d,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy())
    );
    // want to get some more segments here
    // new termvector fields
    int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
    for (int i = 0; i < 5 * mergeFactor; i++) {
      Document doc = new Document();
      doc.add(new Field("tvnot","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
      doc.add(new Field("termvector","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
      doc.add(new Field("tvoffset","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
      doc.add(new Field("tvposition","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
      doc.add(new Field("tvpositionoffset","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));

      writer.addDocument(doc);
    }
    writer.close();
    IndexReader reader = IndexReader.open(d, false);
    FieldSortedTermVectorMapper mapper = new FieldSortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
    reader.getTermFreqVector(0, mapper);
    Map<String,SortedSet<TermVectorEntry>> map = mapper.getFieldToTerms();
    assertTrue("map is null and it shouldn't be", map != null);
    assertTrue("map Size: " + map.size() + " is not: " + 4, map.size() == 4);
    Set<TermVectorEntry> set = map.get("termvector");
    for (Iterator<TermVectorEntry> iterator = set.iterator(); iterator.hasNext();) {
      TermVectorEntry entry = iterator.next();
      assertTrue("entry is null and it shouldn't be", entry != null);
      if (VERBOSE) System.out.println("Entry: " + entry);
    }
    reader.close();
    d.close();
  }

  static void assertTermDocsCount(String msg,
                                  IndexReader reader,
                                  Term term,
                                  int expected)
      throws IOException {
    TermDocs tdocs = null;

    try {
      tdocs = reader.termDocs(term);
      assertNotNull(msg + ", null TermDocs", tdocs);
      int count = 0;
      while(tdocs.next()) {
        count++;
      }
      assertEquals(msg + ", count mismatch", expected, count);
    } finally {
      if (tdocs != null)
        tdocs.close();
    }
  }

    public void testBinaryFields() throws IOException {
        Directory dir = newDirectory();
        byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};

        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));

        for (int i = 0; i < 10; i++) {
          addDoc(writer, "document number " + (i + 1));
          addDocumentWithFields(writer);
          addDocumentWithDifferentFields(writer);
          addDocumentWithTermVectorFields(writer);
        }
        writer.close();
        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
        Document doc = new Document();
        doc.add(new Field("bin1", bin));
        doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);
        writer.close();
        IndexReader reader = IndexReader.open(dir, false);
        doc = reader.document(reader.maxDoc() - 1);
        Field[] fields = doc.getFields("bin1");
        assertNotNull(fields);
        assertEquals(1, fields.length);
        Field b1 = fields[0];
        assertTrue(b1.isBinary());
        byte[] data1 = b1.getBinaryValue();
        assertEquals(bin.length, b1.getBinaryLength());
        for (int i = 0; i < bin.length; i++) {
          assertEquals(bin[i], data1[i + b1.getBinaryOffset()]);
        }
        Set<String> lazyFields = new HashSet<String>();
        lazyFields.add("bin1");
        FieldSelector sel = new SetBasedFieldSelector(new HashSet<String>(), lazyFields);
        doc = reader.document(reader.maxDoc() - 1, sel);
        Fieldable[] fieldables = doc.getFieldables("bin1");
        assertNotNull(fieldables);
        assertEquals(1, fieldables.length);
        Fieldable fb1 = fieldables[0];
        assertTrue(fb1.isBinary());
        assertEquals(bin.length, fb1.getBinaryLength());
        data1 = fb1.getBinaryValue();
        assertEquals(bin.length, fb1.getBinaryLength());
        for (int i = 0; i < bin.length; i++) {
          assertEquals(bin[i], data1[i + fb1.getBinaryOffset()]);
        }
        reader.close();

        // force optimize
        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
        writer.optimize();
        writer.close();
        reader = IndexReader.open(dir, false);
        doc = reader.document(reader.maxDoc() - 1);
        fields = doc.getFields("bin1");
        assertNotNull(fields);
        assertEquals(1, fields.length);
        b1 = fields[0];
        assertTrue(b1.isBinary());
        data1 = b1.getBinaryValue();
        assertEquals(bin.length, b1.getBinaryLength());
        for (int i = 0; i < bin.length; i++) {
          assertEquals(bin[i], data1[i + b1.getBinaryOffset()]);
        }
        reader.close();
        dir.close();
    }

    // Make sure attempts to make changes after reader is
    // closed throw AlreadyClosedException:
    public void testChangesAfterClose() throws IOException {
        Directory dir = newDirectory();

        IndexWriter writer = null;
        IndexReader reader = null;
        Term searchTerm = new Term("content", "aaa");

        //  add 11 documents with term : aaa
        writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
        for (int i = 0; i < 11; i++) {
            addDoc(writer, searchTerm.text());
        }
        writer.close();

        reader = IndexReader.open(dir, false);

        // Close reader:
        reader.close();

        // Then, try to make changes:
        try {
          reader.deleteDocument(4);
          fail("deleteDocument after close failed to throw AlreadyClosedException");
        } catch (AlreadyClosedException e) {
          // expected
        }

        try {
          reader.setNorm(5, "aaa", 2.0f);
          fail("setNorm after close failed to throw AlreadyClosedException");
        } catch (AlreadyClosedException e) {
          // expected
        }

        try {
          reader.undeleteAll();
          fail("undeleteAll after close failed to throw AlreadyClosedException");
        } catch (AlreadyClosedException e) {
          // expected
        }
        dir.close();
    }

    // Make sure a non-readonly reader hits LockObtainFailedException
    // while an IndexWriter already holds the write lock:
    public void testLockObtainFailed() throws IOException {
        Directory dir = newDirectory();

        Term searchTerm = new Term("content", "aaa");

        //  add 11 documents with term : aaa
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
        writer.commit();
        for (int i = 0; i < 11; i++) {
            addDoc(writer, searchTerm.text());
        }

        // Create reader:
        IndexReader reader = IndexReader.open(dir, false);

        // Try to make changes
        try {
          reader.deleteDocument(4);
          fail("deleteDocument should have hit LockObtainFailedException");
        } catch (LockObtainFailedException e) {
          // expected
        }

        try {
          reader.setNorm(5, "aaa", 2.0f);
          fail("setNorm should have hit LockObtainFailedException");
        } catch (LockObtainFailedException e) {
          // expected
        }

        try {
          reader.undeleteAll();
          fail("undeleteAll should have hit LockObtainFailedException");
        } catch (LockObtainFailedException e) {
          // expected
        }
        writer.close();
        reader.close();
        dir.close();
    }

    // Make sure you can set norms & commit even if a reader
    // is open against the index:
    public void testWritingNorms() throws IOException {
        Directory dir = newDirectory();
        IndexWriter writer;
        IndexReader reader;
        Term searchTerm = new Term("content", "aaa");

        //  add 1 document with term : aaa
        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
        addDoc(writer, searchTerm.text());
        writer.close();

        //  now open reader & set norm for doc 0
        reader = IndexReader.open(dir, false);
        reader.setNorm(0, "content", (float) 2.0);

        // we should be holding the write lock now:
        assertTrue("locked", IndexWriter.isLocked(dir));

        reader.commit();

        // we should not be holding the write lock now:
        assertTrue("not locked", !IndexWriter.isLocked(dir));

        // open a 2nd reader:
        IndexReader reader2 = IndexReader.open(dir, false);

        // set norm again for doc 0
        reader.setNorm(0, "content", (float) 3.0);
        assertTrue("locked", IndexWriter.isLocked(dir));

        reader.close();

        // we should not be holding the write lock now:
        assertTrue("not locked", !IndexWriter.isLocked(dir));

        reader2.close();
        dir.close();
    }

    // Make sure you can set norms & commit, and there are
    // no extra norms files left:
    public void testWritingNormsNoReader() throws IOException {
        Directory dir = newDirectory();
        IndexWriter writer = null;
        IndexReader reader = null;
        Term searchTerm = new Term("content", "aaa");

        //  add 1 document with term : aaa
        writer  = new IndexWriter(
            dir,
            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
                setMergePolicy(newLogMergePolicy(false))
        );
        addDoc(writer, searchTerm.text());
        writer.close();

        //  now open reader & set norm for doc 0 (writes to
        //  _0_1.s0)
        reader = IndexReader.open(dir, false);
        reader.setNorm(0, "content", (float) 2.0);
        reader.close();

        //  now open reader again & set norm for doc 0 (writes to _0_2.s0)
        reader = IndexReader.open(dir, false);
        reader.setNorm(0, "content", (float) 2.0);
        reader.close();
        assertFalse("failed to remove first generation norms file on writing second generation",
                    dir.fileExists("_0_1.s0"));

        dir.close();
    }

    /* ??? public void testOpenEmptyDirectory() throws IOException{
      String dirName = "test.empty";
      File fileDirName = new File(dirName);
      if (!fileDirName.exists()) {
        fileDirName.mkdir();
      }
      try {
        IndexReader.open(fileDirName);
        fail("opening IndexReader on empty directory failed to produce FileNotFoundException");
      } catch (FileNotFoundException e) {
        // GOOD
      }
      rmDir(fileDirName);
    }*/

    public void testFilesOpenClose() throws IOException {
        // Create initial data set
        File dirFile = _TestUtil.getTempDir("TestIndexReader.testFilesOpenClose");
        Directory dir = newFSDirectory(dirFile);
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
        addDoc(writer, "test");
        writer.close();
        dir.close();

        // Try to erase the data - this ensures that the writer closed all files
        _TestUtil.rmDir(dirFile);
        dir = newFSDirectory(dirFile);

        // Now create the data set again, just as before
        writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
        addDoc(writer, "test");
        writer.close();
        dir.close();

        // Now open existing directory and test that reader closes all files
        dir = newFSDirectory(dirFile);
        IndexReader reader1 = IndexReader.open(dir, false);
        reader1.close();
        dir.close();

        // The following will fail if reader did not close
        // all files
        _TestUtil.rmDir(dirFile);
    }

    public void testLastModified() throws Exception {
      for(int i=0;i<2;i++) {
        final Directory dir = newDirectory();
        assertFalse(IndexReader.indexExists(dir));
        IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
        addDocumentWithFields(writer);
        assertTrue(IndexWriter.isLocked(dir));          // writer open, so dir is locked
        writer.close();
        assertTrue(IndexReader.indexExists(dir));
        IndexReader reader = IndexReader.open(dir, false);
        assertFalse(IndexWriter.isLocked(dir));         // reader only, no lock
        long lastModified = IndexReader.lastModified(dir);
        if (i == 1) {
          long lastModified2 = IndexReader.lastModified(dir);
          assertEquals(lastModified, lastModified2);
        }
        reader.close();
        // modify index and check lastModified has not gone
        // backwards:
        Thread.sleep(1000);

        writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
        addDocumentWithFields(writer);
        writer.close();
        reader = IndexReader.open(dir, false);
        assertTrue("old lastModified is " + lastModified + "; new lastModified is " + IndexReader.lastModified(dir), lastModified <= IndexReader.lastModified(dir));
        reader.close();
        dir.close();
      }
    }

    public void testVersion() throws IOException {
      Directory dir = newDirectory();
      assertFalse(IndexReader.indexExists(dir));
      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      addDocumentWithFields(writer);
      assertTrue(IndexWriter.isLocked(dir));            // writer open, so dir is locked
      writer.close();
      assertTrue(IndexReader.indexExists(dir));
      IndexReader reader = IndexReader.open(dir, false);
      assertFalse(IndexWriter.isLocked(dir));           // reader only, no lock
      long version = IndexReader.getCurrentVersion(dir);
      reader.close();
      // modify index and check version has been
      // incremented:
      writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
      addDocumentWithFields(writer);
      writer.close();
      reader = IndexReader.open(dir, false);
      assertTrue("old version is " + version + "; new version is " + IndexReader.getCurrentVersion(dir), version < IndexReader.getCurrentVersion(dir));
      reader.close();
      dir.close();
    }

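    // Note on the unlock dance in testLock below: a non-readonly reader takes
    // the write lock lazily, on its first modification, so deleteDocument(0)
    // fails while the IndexWriter holds the lock. IndexWriter.unlock(dir)
    // forcibly releases the lock (unsafe outside of tests), after which the
    // same delete succeeds.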
    public void testLock() throws IOException {
      Directory dir = newDirectory();
      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      addDocumentWithFields(writer);
      writer.close();
      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
      IndexReader reader = IndexReader.open(dir, false);
      try {
        reader.deleteDocument(0);
        fail("expected lock");
      } catch(IOException e) {
        // expected exception
      }
      try {
        IndexWriter.unlock(dir);                // this should not be done in the real world!
      } catch (LockReleaseFailedException lrfe) {
        writer.close();
      }
      reader.deleteDocument(0);
      reader.close();
      writer.close();
      dir.close();
    }

    public void testDocsOutOfOrderJIRA140() throws IOException {
      Directory dir = newDirectory();
      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      for(int i=0;i<11;i++) {
        addDoc(writer, "aaa");
      }
      writer.close();
      IndexReader reader = IndexReader.open(dir, false);

      // Try to delete an invalid docId, yet, within range
      // of the final bits of the BitVector:

      boolean gotException = false;
      try {
        reader.deleteDocument(11);
      } catch (ArrayIndexOutOfBoundsException e) {
        gotException = true;
      }
      reader.close();

      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));

      // We must add more docs to get a new segment written
      for(int i=0;i<11;i++) {
        addDoc(writer, "aaa");
      }

      // Without the fix for LUCENE-140 this call will
      // [incorrectly] hit a "docs out of order"
      // IllegalStateException because above out-of-bounds
      // deleteDocument corrupted the index:
      writer.optimize();
      writer.close();
      if (!gotException) {
        fail("delete of out-of-bounds doc number failed to hit exception");
      }
      dir.close();
    }

    public void testExceptionReleaseWriteLockJIRA768() throws IOException {

      Directory dir = newDirectory();
      IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      addDoc(writer, "aaa");
      writer.close();

      IndexReader reader = IndexReader.open(dir, false);
      try {
        reader.deleteDocument(1);
        fail("did not hit exception when deleting an invalid doc number");
      } catch (ArrayIndexOutOfBoundsException e) {
        // expected
      }
      reader.close();
      if (IndexWriter.isLocked(dir)) {
        fail("write lock is still held after close");
      }

      reader = IndexReader.open(dir, false);
      try {
        reader.setNorm(1, "content", (float) 2.0);
        fail("did not hit exception when calling setNorm on an invalid doc number");
      } catch (ArrayIndexOutOfBoundsException e) {
        // expected
      }
      reader.close();
      if (IndexWriter.isLocked(dir)) {
        fail("write lock is still held after close");
      }
      dir.close();
    }

    private String arrayToString(String[] l) {
      String s = "";
      for(int i=0;i<l.length;i++) {
        if (i > 0) {
          s += "\n    ";
        }
        s += l[i];
      }
      return s;
    }

    public void testOpenReaderAfterDelete() throws IOException {
      File dirFile = _TestUtil.getTempDir("deletetest");
      Directory dir = newFSDirectory(dirFile);
      try {
        IndexReader.open(dir, false);
        fail("expected FileNotFoundException");
      } catch (FileNotFoundException e) {
        // expected
      }

      dirFile.delete();

      // Make sure we still get a FileNotFoundException (not an NPE) after
      // the directory has been deleted out from under us:
      try {
        IndexReader.open(dir, false);
        fail("expected FileNotFoundException");
      } catch (FileNotFoundException e) {
        // expected
      }

      dir.close();
    }

    static void addDocumentWithFields(IndexWriter writer) throws IOException {
        Document doc = new Document();
        doc.add(newField("keyword","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
        doc.add(newField("text","test1", Field.Store.YES, Field.Index.ANALYZED));
        doc.add(newField("unindexed","test1", Field.Store.YES, Field.Index.NO));
        doc.add(newField("unstored","test1", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);
    }

    static void addDocumentWithDifferentFields(IndexWriter writer) throws IOException {
        Document doc = new Document();
        doc.add(newField("keyword2","test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
        doc.add(newField("text2","test1", Field.Store.YES, Field.Index.ANALYZED));
        doc.add(newField("unindexed2","test1", Field.Store.YES, Field.Index.NO));
        doc.add(newField("unstored2","test1", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);
    }

    static void addDocumentWithTermVectorFields(IndexWriter writer) throws IOException {
        Document doc = new Document();
        doc.add(newField("tvnot","tvnot", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
        doc.add(newField("termvector","termvector", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
        doc.add(newField("tvoffset","tvoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
        doc.add(newField("tvposition","tvposition", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
        doc.add(newField("tvpositionoffset","tvpositionoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));

        writer.addDocument(doc);
    }

    static void addDoc(IndexWriter writer, String value) throws IOException {
        Document doc = new Document();
        doc.add(newField("content", value, Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);
    }

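    // Deep equality check used by reader tests: compares doc counts,
    // deletions, optimized state, field names, norms, stored fields, and
    // finally the term dictionaries plus full postings (doc, freq, positions)
    // term by term.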
    public static void assertIndexEquals(IndexReader index1, IndexReader index2) throws IOException {
      assertEquals("IndexReaders have different values for numDocs.", index1.numDocs(), index2.numDocs());
      assertEquals("IndexReaders have different values for maxDoc.", index1.maxDoc(), index2.maxDoc());
      assertEquals("Only one IndexReader has deletions.", index1.hasDeletions(), index2.hasDeletions());
      assertEquals("Only one index is optimized.", index1.isOptimized(), index2.isOptimized());

      // check field names
      Collection<String> fields1 = index1.getFieldNames(FieldOption.ALL);
      Collection<String> fields2 = index2.getFieldNames(FieldOption.ALL);
      assertEquals("IndexReaders have different numbers of fields.", fields1.size(), fields2.size());
      Iterator<String> it1 = fields1.iterator();
      Iterator<String> it2 = fields2.iterator();
      while (it1.hasNext()) {
        assertEquals("Different field names.", it1.next(), it2.next());
      }

      // check norms
      it1 = fields1.iterator();
      while (it1.hasNext()) {
        String curField = it1.next();
        byte[] norms1 = index1.norms(curField);
        byte[] norms2 = index2.norms(curField);
        if (norms1 != null && norms2 != null) {
          assertEquals(norms1.length, norms2.length);
          for (int i = 0; i < norms1.length; i++) {
            assertEquals("Norm different for doc " + i + " and field '" + curField + "'.", norms1[i], norms2[i]);
          }
        } else {
          assertSame(norms1, norms2);
        }
      }

      // check deletions
      for (int i = 0; i < index1.maxDoc(); i++) {
        assertEquals("Doc " + i + " only deleted in one index.", index1.isDeleted(i), index2.isDeleted(i));
      }

      // check stored fields
      for (int i = 0; i < index1.maxDoc(); i++) {
        if (!index1.isDeleted(i)) {
          Document doc1 = index1.document(i);
          Document doc2 = index2.document(i);
          List<Fieldable> fieldable1 = doc1.getFields();
          List<Fieldable> fieldable2 = doc2.getFields();
          assertEquals("Different numbers of fields for doc " + i + ".", fieldable1.size(), fieldable2.size());
          Iterator<Fieldable> itField1 = fieldable1.iterator();
          Iterator<Fieldable> itField2 = fieldable2.iterator();
          while (itField1.hasNext()) {
            Field curField1 = (Field) itField1.next();
            Field curField2 = (Field) itField2.next();
            assertEquals("Different field names for doc " + i + ".", curField1.name(), curField2.name());
            assertEquals("Different field values for doc " + i + ".", curField1.stringValue(), curField2.stringValue());
          }
        }
      }

      // check dictionary and posting lists
      TermEnum enum1 = index1.terms();
      TermEnum enum2 = index2.terms();
      TermPositions tp1 = index1.termPositions();
      TermPositions tp2 = index2.termPositions();
      while(enum1.next()) {
        assertTrue(enum2.next());
        assertEquals("Different term in dictionary.", enum1.term(), enum2.term());
        tp1.seek(enum1.term());
        tp2.seek(enum1.term());
        while(tp1.next()) {
          assertTrue(tp2.next());
          assertEquals("Different doc id in postinglist of term " + enum1.term() + ".", tp1.doc(), tp2.doc());
          assertEquals("Different term frequency in postinglist of term " + enum1.term() + ".", tp1.freq(), tp2.freq());
          for (int i = 0; i < tp1.freq(); i++) {
            assertEquals("Different positions in postinglist of term " + enum1.term() + ".", tp1.nextPosition(), tp2.nextPosition());
          }
        }
      }
    }

    public void testGetIndexCommit() throws IOException {

      Directory d = newDirectory();

      // set up writer
      IndexWriter writer = new IndexWriter(
          d,
          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
              setMaxBufferedDocs(2).
              setMergePolicy(newLogMergePolicy(10))
      );
      for(int i=0;i<27;i++)
        addDocumentWithFields(writer);
      writer.close();

      SegmentInfos sis = new SegmentInfos();
      sis.read(d);
      IndexReader r = IndexReader.open(d, false);
      IndexCommit c = r.getIndexCommit();

      assertEquals(sis.getCurrentSegmentFileName(), c.getSegmentsFileName());

      assertTrue(c.equals(r.getIndexCommit()));

      // Change the index
      writer = new IndexWriter(
          d,
          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
              setOpenMode(OpenMode.APPEND).
              setMaxBufferedDocs(2).
              setMergePolicy(newLogMergePolicy(10))
      );
      for(int i=0;i<7;i++)
        addDocumentWithFields(writer);
      writer.close();

      IndexReader r2 = r.reopen();
      assertFalse(c.equals(r2.getIndexCommit()));
      r2.close();

      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
        new MockAnalyzer(random))
        .setOpenMode(OpenMode.APPEND));
      writer.optimize();
      writer.close();

      r2 = r.reopen();

      r.close();
      r2.close();
      d.close();
    }

    public void testReadOnly() throws Throwable {
      Directory d = newDirectory();
      IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      addDocumentWithFields(writer);
      writer.commit();
      addDocumentWithFields(writer);
      writer.close();

      IndexReader r = IndexReader.open(d, true);
      try {
        r.deleteDocument(0);
        fail();
      } catch (UnsupportedOperationException uoe) {
        // expected
      }

      writer = new IndexWriter(
          d,
          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
              setOpenMode(OpenMode.APPEND).
              setMergePolicy(newLogMergePolicy(10))
      );
      addDocumentWithFields(writer);
      writer.close();

      // Make sure reopen is still readonly:
      IndexReader r2 = r.reopen();
      r.close();

      assertFalse(r == r2);

      try {
        r2.deleteDocument(0);
        fail();
      } catch (UnsupportedOperationException uoe) {
        // expected
      }

      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
        new MockAnalyzer(random))
        .setOpenMode(OpenMode.APPEND));
      writer.optimize();
      writer.close();

      // Make sure reopen to a single segment is still readonly:
      IndexReader r3 = r2.reopen();
      assertFalse(r3 == r2);
      r2.close();

      try {
        r3.deleteDocument(0);
        fail();
      } catch (UnsupportedOperationException uoe) {
        // expected
      }

      // Make sure write lock isn't held
      writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
          new MockAnalyzer(random))
      .setOpenMode(OpenMode.APPEND));
      writer.close();

      r3.close();
      d.close();
    }

  // LUCENE-1474
  public void testIndexReader() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("b"));
    writer.addDocument(createDocument("c"));
    writer.close();
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocuments(new Term("id", "a"));
    reader.flush();
    reader.deleteDocuments(new Term("id", "b"));
    reader.close();
    IndexReader.open(dir, true).close();
    dir.close();
  }

  static Document createDocument(String id) {
    Document doc = new Document();
    doc.add(newField("id", id, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
    return doc;
  }

  // LUCENE-1468 -- make sure on attempting to open an
  // IndexReader on a non-existent directory, you get a
  // good exception
  public void testNoDir() throws Throwable {
    Directory dir = newFSDirectory(_TestUtil.getTempDir("doesnotexist"));
    try {
      IndexReader.open(dir, true);
      fail("did not hit expected exception");
    } catch (NoSuchDirectoryException nsde) {
      // expected
    }
    dir.close();
  }

  // LUCENE-1509
  public void testNoDupCommitFileNames() throws Throwable {

    Directory dir = newDirectory();

    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random))
        .setMaxBufferedDocs(2));
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("a"));
    writer.close();

    Collection<IndexCommit> commits = IndexReader.listCommits(dir);
    for (final IndexCommit commit : commits) {
      Collection<String> files = commit.getFileNames();
      HashSet<String> seen = new HashSet<String>();
      for (final String fileName : files) {
        assertTrue("file " + fileName + " was duplicated", !seen.contains(fileName));
        seen.add(fileName);
      }
    }

    dir.close();
  }

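  // Background for the two LUCENE-1579 tests below: FieldCache entries are
  // keyed on the underlying segment core, which a clone (or a reopen that
  // leaves a segment unchanged) shares with the original reader; the identity
  // checks (ints == ints2) are what prove the cached array was reused rather
  // than reloaded.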
  // LUCENE-1579: Ensure that on a cloned reader, segments
  // reuse the doc values arrays in FieldCache
  public void testFieldCacheReuseAfterClone() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    Document doc = new Document();
    doc.add(newField("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
    writer.addDocument(doc);
    writer.close();

    // Open reader
    IndexReader r = SegmentReader.getOnlySegmentReader(dir);
    final int[] ints = FieldCache.DEFAULT.getInts(r, "number");
    assertEquals(1, ints.length);
    assertEquals(17, ints[0]);

    // Clone reader
    IndexReader r2 = (IndexReader) r.clone();
    r.close();
    assertTrue(r2 != r);
    final int[] ints2 = FieldCache.DEFAULT.getInts(r2, "number");
    r2.close();

    assertEquals(1, ints2.length);
    assertEquals(17, ints2[0]);
    assertTrue(ints == ints2);

    dir.close();
  }

  // LUCENE-1579: Ensure that on a reopened reader, any
  // shared segments reuse the doc values arrays in
  // FieldCache
  public void testFieldCacheReuseAfterReopen() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(10))
    );
    Document doc = new Document();
    doc.add(newField("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
    writer.addDocument(doc);
    writer.commit();

    // Open reader1
    IndexReader r = IndexReader.open(dir, false);
    IndexReader r1 = SegmentReader.getOnlySegmentReader(r);
    final int[] ints = FieldCache.DEFAULT.getInts(r1, "number");
    assertEquals(1, ints.length);
    assertEquals(17, ints[0]);

    // Add new segment
    writer.addDocument(doc);
    writer.commit();

    // Reopen reader1 --> reader2
    IndexReader r2 = r.reopen();
    r.close();
    IndexReader sub0 = r2.getSequentialSubReaders()[0];
    final int[] ints2 = FieldCache.DEFAULT.getInts(sub0, "number");
    r2.close();
    assertTrue(ints == ints2);

    writer.close();
    dir.close();
  }

  // LUCENE-1586: getUniqueTermCount
  public void testUniqueTermCount() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    Document doc = new Document();
    doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
    doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
    writer.addDocument(doc);
    writer.addDocument(doc);
    writer.commit();

    IndexReader r = IndexReader.open(dir, false);
    IndexReader r1 = SegmentReader.getOnlySegmentReader(r);
    assertEquals(36, r1.getUniqueTermCount());
    writer.addDocument(doc);
    writer.commit();
    IndexReader r2 = r.reopen();
    r.close();
    try {
      r2.getUniqueTermCount();
      fail("expected exception");
    } catch (UnsupportedOperationException uoe) {
      // expected
    }
    IndexReader[] subs = r2.getSequentialSubReaders();
    for(int i=0;i<subs.length;i++) {
      assertEquals(36, subs[i].getUniqueTermCount());
    }
    r2.close();
    writer.close();
    dir.close();
  }

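  // In the test below, the 4-argument IndexReader.open(directory, deletion
  // policy, readOnly, termInfosIndexDivisor) is passed -1 as the divisor,
  // which skips loading the terms index entirely; term lookups such as
  // docFreq then fail with IllegalStateException, as asserted.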
  // LUCENE-1609: don't load terms index
  public void testNoTermsIndex() throws Throwable {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    Document doc = new Document();
    doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
    doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
    writer.addDocument(doc);
    writer.addDocument(doc);
    writer.close();

    IndexReader r = IndexReader.open(dir, null, true, -1);
    try {
      r.docFreq(new Term("field", "f"));
      fail("did not hit expected exception");
    } catch (IllegalStateException ise) {
      // expected
    }
    assertFalse(((SegmentReader) r.getSequentialSubReaders()[0]).termsIndexLoaded());
    assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());

    writer = new IndexWriter(
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(10))
    );
    writer.addDocument(doc);
    writer.close();

    // LUCENE-1718: ensure re-open carries over no terms index:
    IndexReader r2 = r.reopen();
    r.close();
    IndexReader[] subReaders = r2.getSequentialSubReaders();
    assertEquals(2, subReaders.length);
    for(int i=0;i<2;i++) {
      assertFalse(((SegmentReader) subReaders[i]).termsIndexLoaded());
    }
    r2.close();
    dir.close();
  }

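  // Background for LUCENE-2046: prepareCommit() writes and syncs the new
  // segments file but does not make it visible to readers until commit(), so
  // isCurrent() stays true across prepareCommit() and only flips after
  // commit().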
  // LUCENE-2046
  public void testPrepareCommitIsCurrent() throws Throwable {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.commit();
    Document doc = new Document();
    writer.addDocument(doc);
    IndexReader r = IndexReader.open(dir, true);
    assertTrue(r.isCurrent());
    writer.addDocument(doc);
    writer.prepareCommit();
    assertTrue(r.isCurrent());
    IndexReader r2 = r.reopen();
    assertTrue(r == r2);
    writer.commit();
    assertFalse(r.isCurrent());
    writer.close();
    r.close();
    dir.close();
  }

  // LUCENE-2753
  public void testListCommits() throws Exception {
    Directory dir = newDirectory();
    SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(sdp));
    writer.addDocument(new Document());
    writer.commit();
    sdp.snapshot("c1");
    writer.addDocument(new Document());
    writer.commit();
    sdp.snapshot("c2");
    writer.addDocument(new Document());
    writer.commit();
    sdp.snapshot("c3");
    writer.close();
    long currentGen = 0;
    for (IndexCommit ic : IndexReader.listCommits(dir)) {
      assertTrue("currentGen=" + currentGen + " commitGen=" + ic.getGeneration(), currentGen < ic.getGeneration());
      currentGen = ic.getGeneration();
    }
    dir.close();
  }

  // LUCENE-2812
  public void testIndexExists() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.addDocument(new Document());
    writer.prepareCommit();
    assertFalse(IndexReader.indexExists(dir));
    writer.close();
    assertTrue(IndexReader.indexExists(dir));
    dir.close();
  }

  // LUCENE-2474
  public void testReaderFinishedListener() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
    ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
    writer.setInfoStream(VERBOSE ? System.out : null);
    writer.addDocument(new Document());
    writer.commit();
    writer.addDocument(new Document());
    writer.commit();
    final IndexReader reader = writer.getReader();
    final int[] closeCount = new int[1];
    final IndexReader.ReaderFinishedListener listener = new IndexReader.ReaderFinishedListener() {
      public void finished(IndexReader reader) {
        closeCount[0]++;
      }
    };

    reader.addReaderFinishedListener(listener);

    reader.close();

    // Just the top reader
    assertEquals(1, closeCount[0]);
    writer.close();

    // Now also the subs
    assertEquals(3, closeCount[0]);

    IndexReader reader2 = IndexReader.open(dir);
    reader2.addReaderFinishedListener(listener);

    closeCount[0] = 0;
    reader2.close();
    assertEquals(3, closeCount[0]);
    dir.close();
  }

  public void testOOBDocID() throws Exception {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.addDocument(new Document());
    IndexReader r = writer.getReader();
    writer.close();
    r.document(0);
    try {
      r.document(1);
      fail("did not hit exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    r.close();
    dir.close();
  }
}