1 package org.apache.lucene.index;
4 * Licensed to the Apache Software Foundation (ASF) under one or more
5 * contributor license agreements. See the NOTICE file distributed with
6 * this work for additional information regarding copyright ownership.
7 * The ASF licenses this file to You under the Apache License, Version 2.0
8 * (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
20 import org.apache.lucene.util.LuceneTestCase;
22 import org.apache.lucene.analysis.MockAnalyzer;
23 import org.apache.lucene.analysis.standard.StandardAnalyzer;
24 import org.apache.lucene.document.Document;
25 import org.apache.lucene.document.Field;
26 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
27 import org.apache.lucene.store.Directory;
29 import java.io.IOException;
30 import java.util.Random;
// Test fixture state shared by the test methods below.
// NOTE(review): this chunk is a lossy sample of the original file — the stale
// line numbers embedded in each line jump (e.g. 37 -> 40), so lines are
// missing throughout; treat every span here as potentially incomplete.
32 public class TestDirectoryReader extends LuceneTestCase {
// Directory holding the two single-doc segments written in setUp().
33 protected Directory dir;
// The two fixture documents populated via DocHelper.setupDoc().
34 private Document doc1;
35 private Document doc2;
// Per-segment readers a test may open; tearDown() closes non-null entries.
36 protected SegmentReader [] readers = new SegmentReader[2];
// Fresh SegmentInfos created in setUp(); openReader() asserts it is non-null.
37 protected SegmentInfos sis;
// Builds the fixture: two DocHelper documents, each written to dir as its
// own segment, plus a fresh SegmentInfos.
// NOTE(review): internal numbering jumps 40->43 and 49->50+, so lines are
// missing from this view — presumably super.setUp() and the dir
// initialization before line 43, and the closing brace after line 49;
// confirm against the full file.
40 public void setUp() throws Exception {
43 doc1 = new Document();
44 doc2 = new Document();
45 DocHelper.setupDoc(doc1);
46 DocHelper.setupDoc(doc2);
// Each writeDoc call creates a separate segment, giving a 2-segment index.
47 DocHelper.writeDoc(random, dir, doc1);
48 DocHelper.writeDoc(random, dir, doc2);
49 sis = new SegmentInfos();
// Closes whichever segment readers a test left open; entries may legitimately
// be null when a test never populated them.
// NOTE(review): internal numbering jumps 56->61, so the tail of this method
// (presumably super.tearDown()/dir.close() and the closing brace) is not
// visible in this chunk — confirm against the full file.
54 public void tearDown() throws Exception {
55 if (readers[0] != null) readers[0].close();
56 if (readers[1] != null) readers[1].close();
// Opens a writable (readOnly=false) reader over dir, verifies it is the
// expected DirectoryReader implementation, and sanity-checks the fixtures.
// NOTE(review): internal numbering jumps 61->63 and 68->73 — the declaration
// of `reader` (line 62) and the return statement/closing brace are not
// visible in this chunk; confirm against the full file.
61 protected IndexReader openReader() throws IOException {
63 reader = IndexReader.open(dir, false);
// The 2-segment fixture should surface as a DirectoryReader.
64 assertTrue(reader instanceof DirectoryReader);
66 assertTrue(dir != null);
67 assertTrue(sis != null);
68 assertTrue(reader != null);
// NOTE(review): the body of this test (internal lines 74-77) is entirely
// missing from this chunk; presumably it drives the doTestDocument /
// doTestUndeleteAll helpers below — confirm against the full file.
73 public void test() throws Exception {
// Verifies stored-field retrieval for both fixture docs (the stored view must
// lose exactly the unstored fields), that a term-frequency vector exists for
// TEXT_FIELD_2 of doc 0, and that norms pass TestSegmentReader's checks.
// NOTE(review): internal numbering jumps 78->80 and stops at 90 — the opening
// statement(s) and the reader.close()/closing brace are not visible in this
// chunk; confirm against the full file.
78 public void doTestDocument() throws IOException {
80 IndexReader reader = openReader();
81 assertTrue(reader != null);
82 Document newDoc1 = reader.document(0);
83 assertTrue(newDoc1 != null);
// Retrieved doc carries only the stored fields: original count minus unstored.
84 assertTrue(DocHelper.numFields(newDoc1) == DocHelper.numFields(doc1) - DocHelper.unstored.size());
85 Document newDoc2 = reader.document(1);
86 assertTrue(newDoc2 != null);
87 assertTrue(DocHelper.numFields(newDoc2) == DocHelper.numFields(doc2) - DocHelper.unstored.size());
88 TermFreqVector vector = reader.getTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
89 assertTrue(vector != null);
90 TestSegmentReader.checkNorms(reader);
// Exercises delete/undeleteAll: undeleteAll must restore numDocs, survive a
// commit/close/reopen cycle, and a subsequent delete must also persist.
// NOTE(review): many lines are missing from this chunk (internal numbering
// jumps at 94->96, 102->104, 104->108, 110->114, 115->117, 118->121,
// 123->126) — presumably the commit/close calls guarded by the MultiReader
// checks, the bodies of those if-statements, and the closing brace; do not
// treat this span as complete. Confirm against the full file.
94 public void doTestUndeleteAll() throws IOException {
96 IndexReader reader = openReader();
97 assertTrue(reader != null);
98 assertEquals( 2, reader.numDocs() );
99 reader.deleteDocument(0);
100 assertEquals( 1, reader.numDocs() );
// undeleteAll must bring the deleted doc back.
101 reader.undeleteAll();
102 assertEquals( 2, reader.numDocs() );
104 // Ensure undeleteAll survives commit/close/reopen:
108 if (reader instanceof MultiReader)
109 // MultiReader does not "own" the directory so it does
110 // not write the changes to sis on commit:
114 reader = openReader();
115 assertEquals( 2, reader.numDocs() );
// Now check a delete also survives commit/close/reopen.
117 reader.deleteDocument(0);
118 assertEquals( 1, reader.numDocs() );
121 if (reader instanceof MultiReader)
122 // MultiReader does not "own" the directory so it does
123 // not write the changes to sis on commit:
126 reader = openReader();
127 assertEquals( 1, reader.numDocs() );
// A MultiReader over two directories is current right after opening and stops
// being current once either underlying directory is modified.
// NOTE(review): internal numbering jumps 142->146 and stops at 147 — the try
// block matching the visible catch (presumably a call expected to throw
// UnsupportedOperationException, likely isCurrent() on a closed/multi state)
// and the method tail/closing brace are not in this chunk; confirm against
// the full file.
131 public void testIsCurrent() throws IOException {
132 Directory ramDir1=newDirectory();
133 addDoc(random, ramDir1, "test foo", true);
134 Directory ramDir2=newDirectory();
135 addDoc(random, ramDir2, "test blah", true);
136 IndexReader[] readers = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir2, false)};
137 MultiReader mr = new MultiReader(readers);
138 assertTrue(mr.isCurrent()); // just opened, must be current
// Appending to either sub-directory must invalidate currency of the whole mr.
139 addDoc(random, ramDir1, "more text", false);
140 assertFalse(mr.isCurrent()); // has been modified, not current anymore
141 addDoc(random, ramDir2, "even more text", false);
142 assertFalse(mr.isCurrent()); // has been modified even more, not current anymore
146 } catch (UnsupportedOperationException e) {
147 // expected exception
// Mixes a TermDocs from one MultiReader with a TermEnum from a different
// MultiReader to ensure the cross-reader combination does not blow up.
// NOTE(review): internal numbering jumps at 160->162, 165->167, 169->173,
// 175->179 and stops at 180 — presumably the declaration of `ret`, the
// td2.seek(te3)-style call being exercised, the final assertion on `ret`,
// the reader close calls, and the closing brace are missing from this chunk;
// confirm against the full file.
154 public void testMultiTermDocs() throws IOException {
155 Directory ramDir1=newDirectory();
156 addDoc(random, ramDir1, "test foo", true);
157 Directory ramDir2=newDirectory();
158 addDoc(random, ramDir2, "test blah", true);
159 Directory ramDir3=newDirectory();
160 addDoc(random, ramDir3, "test wow", true);
// mr2 spans dirs 1+3; mr3 spans dirs 1+2+3 — deliberately different readers.
162 IndexReader[] readers1 = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir3, false)};
163 IndexReader[] readers2 = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir2, false), IndexReader.open(ramDir3, false)};
164 MultiReader mr2 = new MultiReader(readers1);
165 MultiReader mr3 = new MultiReader(readers2);
167 // test mixing up TermDocs and TermEnums from different readers.
168 TermDocs td2 = mr2.termDocs();
169 TermEnum te3 = mr3.terms(new Term("body","wow"));
173 // This should blow up if we forget to check that the TermEnum is from the same
174 // reader as the TermDocs.
175 while (td2.next()) ret += td2.doc();
179 // really a dummy assert to ensure that we got some docs and to ensure that
180 // nothing is optimized out.
// Iterating termDocs(null) (the all-docs enumerator) must visit every doc in
// order, each with freq 1.
// NOTE(review): internal numbering jumps 193->195 and stops at 199 — the
// NUM_DOCS constant referenced here, the post-loop assertions, the close
// calls, and the closing braces are not visible in this chunk; confirm
// against the full file.
192 public void testAllTermDocs() throws IOException {
193 IndexReader reader = openReader();
195 TermDocs td = reader.termDocs(null);
196 for(int i=0;i<NUM_DOCS;i++) {
197 assertTrue(td.next());
198 assertEquals(i, td.doc());
199 assertEquals(1, td.freq());
205 private void addDoc(Random random, Directory ramDir1, String s, boolean create) throws IOException {
206 IndexWriter iw = new IndexWriter(ramDir1, newIndexWriterConfig(
207 TEST_VERSION_CURRENT,
208 new MockAnalyzer(random)).setOpenMode(
209 create ? OpenMode.CREATE : OpenMode.APPEND));
210 Document doc = new Document();
211 doc.add(newField("body", s, Field.Store.YES, Field.Index.ANALYZED));