1 package org.apache.lucene.index;
3 * Licensed to the Apache Software Foundation (ASF) under one or more
4 * contributor license agreements. See the NOTICE file distributed with
5 * this work for additional information regarding copyright ownership.
6 * The ASF licenses this file to You under the Apache License, Version 2.0
7 * (the "License"); you may not use this file except in compliance with
8 * the License. You may obtain a copy of the License at
10 * http://www.apache.org/licenses/LICENSE-2.0
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
19 import org.apache.lucene.analysis.MockAnalyzer;
21 import org.apache.lucene.document.Document;
22 import org.apache.lucene.document.Field;
23 import org.apache.lucene.store.Directory;
24 import org.apache.lucene.util.LuceneTestCase;
26 public class TestMultiPassIndexSplitter extends LuceneTestCase {
// Builds the fixture index: NUM_DOCS small documents with an exact-match "id"
// field and an analyzed "f" field, then opens a *writable* reader ("input")
// and deletes the last document so the split tests exercise deletion handling.
// NOTE(review): this chunk is missing lines (super.setUp(), the Document
// construction, w.addDocument(...), writer close, closing braces) — the
// statements below are only a partial view of the original method.
32 public void setUp() throws Exception {
35 IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
37 for (int i = 0; i < NUM_DOCS; i++) {
// "id" is stored and NOT_ANALYZED so exact Term lookups on it work below;
// "f" is analyzed body text ("<i> <i>").
39 doc.add(newField("id", i + "", Field.Store.YES, Field.Index.NOT_ANALYZED));
40 doc.add(newField("f", i + " " + i, Field.Store.YES, Field.Index.ANALYZED));
// readOnly=false: the reader must be writable to perform the deletion below.
44 input = IndexReader.open(dir, false);
45 // delete the last doc
46 input.deleteDocument(input.maxDoc() - 1);
// Releases fixture resources opened in setUp (presumably closes "input" and
// "dir", then calls super.tearDown()) — body not visible in this chunk.
50 public void tearDown() throws Exception {
57 * Test round-robin splitting.
// Round-robin split (last arg false): docs are distributed cyclically across
// the three target directories, so part 0 starts with id "0", part 1 with
// id "1", part 2 with id "2", and each part must NOT contain ids belonging
// to the other parts.
// NOTE(review): missing lines here include the dirs[] initializers, the
// TermEnum ("te") creation/positioning before each assertNotSame, and the
// reader/directory close calls — partial view only.
59 public void testSplitRR() throws Exception {
60 MultiPassIndexSplitter splitter = new MultiPassIndexSplitter();
61 Directory[] dirs = new Directory[]{
66 splitter.split(TEST_VERSION_CURRENT, input, dirs, false);
68 ir = IndexReader.open(dirs[0], true);
// Each part holds roughly a third of the docs (one part is a doc short
// because of the deletion in setUp and integer division).
69 assertTrue(ir.numDocs() - NUM_DOCS / 3 <= 1); // rounding error
70 Document doc = ir.document(0);
71 assertEquals("0", doc.get("id"));
// Part 0 must not contain id "1" — te was presumably seeked to t above.
74 t = new Term("id", "1");
76 assertNotSame(t, te.term());
78 ir = IndexReader.open(dirs[1], true);
79 assertTrue(ir.numDocs() - NUM_DOCS / 3 <= 1);
// Part 1 starts with id "1" and must not contain id "0".
81 assertEquals("1", doc.get("id"));
82 t = new Term("id", "0");
84 assertNotSame(t, te.term());
86 ir = IndexReader.open(dirs[2], true);
87 assertTrue(ir.numDocs() - NUM_DOCS / 3 <= 1);
// Part 2 starts with id "2" and must contain neither "1" nor "0".
89 assertEquals("2", doc.get("id"));
90 t = new Term("id", "1");
92 assertNotSame(t, te.term());
93 t = new Term("id", "0");
95 assertNotSame(t, te.term());
// Clean up the temporary split directories (loop body not visible here).
97 for (Directory d : dirs)
102 * Test sequential splitting.
// Sequential split (last arg true): docs are assigned in contiguous ranges,
// so each part's first doc id equals the running count of docs in the
// preceding parts ("start"). Also verifies the doc deleted in setUp
// (id NUM_DOCS-1) did not survive the split.
// NOTE(review): missing lines here include the dirs[] initializers, the
// TermEnum ("te") setup before the final assertNotSame, and the close-loop
// body after the last visible line — partial view only.
104 public void testSplitSeq() throws Exception {
105 MultiPassIndexSplitter splitter = new MultiPassIndexSplitter();
106 Directory[] dirs = new Directory[]{
111 splitter.split(TEST_VERSION_CURRENT, input, dirs, true);
113 ir = IndexReader.open(dirs[0], true);
114 assertTrue(ir.numDocs() - NUM_DOCS / 3 <= 1);
115 Document doc = ir.document(0);
116 assertEquals("0", doc.get("id"));
// Track how many docs have been consumed so far; the next part's first
// id must pick up exactly where this part left off.
117 int start = ir.numDocs();
119 ir = IndexReader.open(dirs[1], true);
120 assertTrue(ir.numDocs() - NUM_DOCS / 3 <= 1);
121 doc = ir.document(0);
122 assertEquals(start + "", doc.get("id"));
123 start += ir.numDocs();
125 ir = IndexReader.open(dirs[2], true);
126 assertTrue(ir.numDocs() - NUM_DOCS / 3 <= 1);
127 doc = ir.document(0);
128 assertEquals(start + "", doc.get("id"));
129 // make sure the deleted doc is not here
132 t = new Term("id", (NUM_DOCS - 1) + "");
134 assertNotSame(t, te.term());
// Clean up the temporary split directories (loop body not visible here).
136 for (Directory d : dirs)