1 package org.apache.lucene.index;
4 * Licensed to the Apache Software Foundation (ASF) under one or more
5 * contributor license agreements. See the NOTICE file distributed with
6 * this work for additional information regarding copyright ownership.
7 * The ASF licenses this file to You under the Apache License, Version 2.0
8 * (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
20 import java.io.ByteArrayOutputStream;
21 import java.io.IOException;
22 import java.io.PrintStream;
23 import java.io.Reader;
24 import java.io.StringReader;
25 import java.util.ArrayList;
26 import java.util.List;
27 import java.util.Random;
29 import org.apache.lucene.analysis.Analyzer;
30 import org.apache.lucene.analysis.MockAnalyzer;
31 import org.apache.lucene.analysis.MockTokenizer;
32 import org.apache.lucene.analysis.TokenFilter;
33 import org.apache.lucene.analysis.TokenStream;
34 import org.apache.lucene.document.Document;
35 import org.apache.lucene.document.Field;
36 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
37 import org.apache.lucene.search.IndexSearcher;
38 import org.apache.lucene.search.PhraseQuery;
39 import org.apache.lucene.store.Directory;
40 import org.apache.lucene.store.IndexInput;
41 import org.apache.lucene.store.IndexOutput;
42 import org.apache.lucene.store.MockDirectoryWrapper;
43 import org.apache.lucene.store.RAMDirectory;
44 import org.apache.lucene.util.LuceneTestCase;
45 import org.apache.lucene.util._TestUtil;
// Tests IndexWriter's robustness when exceptions are thrown at many
// different points: during analysis, flush, merge init, commit, sync,
// and rollback — and that the index stays consistent afterwards.
// NOTE(review): this listing is an excerpt; the embedded source line
// numbers are non-contiguous, so some statements/braces are elided here.
47 public class TestIndexWriterExceptions extends LuceneTestCase {
// Worker thread that repeatedly updates documents against an
// IndexWriter while MockIndexWriter.testPoint randomly throws.
// Any unexpected failure is recorded in the volatile `failure` field
// so the driving test can assert on it after join().
49 private class IndexerThread extends Thread {
// Per-thread RNG seeded from the test's master random for reproducibility.
53 final Random r = new Random(random.nextLong());
// Set when this thread hits an exception it did NOT expect; read by the test.
54 volatile Throwable failure;
56 public IndexerThread(int i, IndexWriter writer) {
57 setName("Indexer " + i);
// Build one document with a mix of stored/unstored, analyzed/unanalyzed
// fields, some with term vectors, to exercise many indexing code paths.
64 final Document doc = new Document();
66 doc.add(newField(r, "content1", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.ANALYZED));
67 doc.add(newField(r, "content6", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
68 doc.add(newField(r, "content2", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.NOT_ANALYZED));
69 doc.add(newField(r, "content3", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.NO));
71 doc.add(newField(r, "content4", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.ANALYZED));
72 doc.add(newField(r, "content5", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.NOT_ANALYZED));
74 doc.add(newField(r, "content7", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.NOT_ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
76 final Field idField = newField(r, "id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
// Run update cycles for ~500ms.
79 final long stopTime = System.currentTimeMillis() + 500;
83 System.out.println(Thread.currentThread().getName() + ": TEST: IndexerThread: cycle");
// Reuse ids in [0,50) so updateDocument(s) actually replaces prior docs.
86 final String id = ""+r.nextInt(50);
88 Term idTerm = new Term("id", id);
// Randomly exercise the multi-document update path vs. the single-doc path.
90 if (r.nextBoolean()) {
91 final List<Document> docs = new ArrayList<Document>();
92 final int count = _TestUtil.nextInt(r, 1, 20);
93 for(int c=0;c<count;c++) {
96 writer.updateDocuments(idTerm, docs);
98 writer.updateDocument(idTerm, doc);
// A RuntimeException here is the intentional failure injected by
// MockIndexWriter.testPoint; log it and verify the index is still sane.
100 } catch (RuntimeException re) {
102 System.out.println(Thread.currentThread().getName() + ": EXC: ");
103 re.printStackTrace(System.out);
106 _TestUtil.checkIndex(writer.getDirectory());
// IOException from checkIndex is NOT expected — presumably recorded in
// `failure` by elided code; TODO confirm against full source.
107 } catch (IOException ioe) {
108 System.out.println(Thread.currentThread().getName() + ": unexpected exception1");
109 ioe.printStackTrace(System.out);
113 } catch (Throwable t) {
114 System.out.println(Thread.currentThread().getName() + ": unexpected exception2");
115 t.printStackTrace(System.out);
122 // After a possible exception (above) I should be able
123 // to add a new document without hitting an
126 writer.updateDocument(idTerm, doc);
127 } catch (Throwable t) {
128 System.out.println(Thread.currentThread().getName() + ": unexpected exception3");
129 t.printStackTrace(System.out);
133 } while(System.currentTimeMillis() < stopTime);
// Per-thread failure switch: when set for the current thread,
// MockIndexWriter.testPoint may randomly throw (see below).
137 ThreadLocal<Thread> doFail = new ThreadLocal<Thread>();
// IndexWriter subclass whose testPoint hook intentionally throws a
// RuntimeException with ~1/40 probability when the current thread's
// doFail flag is set — except at the "startDoFlush" test point.
139 private class MockIndexWriter extends IndexWriter {
140 Random r = new Random(random.nextLong());
142 public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
147 boolean testPoint(String name) {
148 if (doFail.get() != null && !name.equals("startDoFlush") && r.nextInt(40) == 17) {
150 System.out.println(Thread.currentThread().getName() + ": NOW FAIL: " + name);
// Print the stack so the failing test point's location is visible in logs.
151 new Throwable().printStackTrace(System.out);
153 throw new RuntimeException(Thread.currentThread().getName() + ": intentionally failing at " + name);
// Runs a single IndexerThread against a MockIndexWriter that randomly
// throws; afterwards verifies the writer survived and that any doc whose
// tokenization aborted mid-way was fully deleted (no partial docs).
159 public void testRandomExceptions() throws Throwable {
161 System.out.println("\nTEST: start testRandomExceptions");
163 MockDirectoryWrapper dir = newDirectory();
165 MockAnalyzer analyzer = new MockAnalyzer(random);
166 analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
// Small RAM buffer to force frequent flushes (more chances to fail).
167 MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
168 .setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()));
169 ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
170 //writer.setMaxBufferedDocs(10);
172 System.out.println("TEST: initial commit");
177 writer.setInfoStream(System.out);
180 IndexerThread thread = new IndexerThread(0, writer);
// The thread must not have hit any unexpected (non-injected) failure.
182 if (thread.failure != null) {
183 thread.failure.printStackTrace(System.out);
184 fail("thread " + thread.getName() + ": hit unexpected failure");
188 System.out.println("TEST: commit after thread start");
// close() may throw due to injected failures; log but don't fail the test.
194 } catch (Throwable t) {
195 System.out.println("exception during close:");
196 t.printStackTrace(System.out);
200 // Confirm that when doc hits exception partway through tokenization, it's deleted:
// "aaa" and "ddd" are the first and last tokens of content4, so equal
// docFreq means no document was half-indexed.
201 IndexReader r2 = IndexReader.open(dir, true);
202 final int count = r2.docFreq(new Term("content4", "aaa"));
203 final int count2 = r2.docFreq(new Term("content4", "ddd"));
204 assertEquals(count, count2);
// Same as testRandomExceptions, but with NUM_THREADS concurrent
// IndexerThreads hammering one writer, to exercise concurrent
// exception handling in DocumentsWriter.
210 public void testRandomExceptionsThreads() throws Throwable {
211 MockDirectoryWrapper dir = newDirectory();
212 MockAnalyzer analyzer = new MockAnalyzer(random);
213 analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
214 MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
215 .setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()));
216 ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
217 //writer.setMaxBufferedDocs(10);
221 writer.setInfoStream(System.out);
224 final int NUM_THREADS = 4;
226 final IndexerThread[] threads = new IndexerThread[NUM_THREADS];
227 for(int i=0;i<NUM_THREADS;i++) {
228 threads[i] = new IndexerThread(i, writer);
232 for(int i=0;i<NUM_THREADS;i++)
// Every thread must finish without an unexpected failure.
235 for(int i=0;i<NUM_THREADS;i++)
236 if (threads[i].failure != null)
237 fail("thread " + threads[i].getName() + ": hit unexpected failure");
// close() may throw due to injected failures; log but don't fail the test.
243 } catch (Throwable t) {
244 System.out.println("exception during close:");
245 t.printStackTrace(System.out);
249 // Confirm that when doc hits exception partway through tokenization, it's deleted:
250 IndexReader r2 = IndexReader.open(dir, true);
251 final int count = r2.docFreq(new Term("content4", "aaa"));
252 final int count2 = r2.docFreq(new Term("content4", "ddd"));
253 assertEquals(count, count2);
// Writer that deterministically fails (RuntimeException) at the
// "DocumentsWriter.ThreadState.init start" test point when doFail is set.
260 private static final class MockIndexWriter2 extends IndexWriter {
262 public MockIndexWriter2(Directory dir, IndexWriterConfig conf) throws IOException {
269 boolean testPoint(String name) {
270 if (doFail && name.equals("DocumentsWriter.ThreadState.init start"))
271 throw new RuntimeException("intentionally failing");
// Message carried by the IOException thrown from CrashingFilter, so
// tests can assert they caught the intended exception.
276 private static String CRASH_FAIL_MESSAGE = "I'm experiencing problems";
// TokenFilter that throws an IOException once it has produced 4 tokens,
// but only for the field named "crash"; other fields pass through
// untouched. Used to simulate a non-abortable analysis failure
// partway through tokenizing a document.
278 private class CrashingFilter extends TokenFilter {
282 public CrashingFilter(String fieldName, TokenStream input) {
284 this.fieldName = fieldName;
288 public boolean incrementToken() throws IOException {
// Fail on the 5th call for the "crash" field (count presumably reset
// in reset() below — elided here; TODO confirm against full source).
289 if (this.fieldName.equals("crash") && count++ >= 4)
290 throw new IOException(CRASH_FAIL_MESSAGE);
291 return input.incrementToken();
295 public void reset() throws IOException {
// Verifies that an exception thrown while DocumentsWriter initializes a
// thread state (via MockIndexWriter2's test point) surfaces to the caller
// as a RuntimeException and does not wedge the writer.
301 public void testExceptionDocumentsWriterInit() throws IOException {
302 Directory dir = newDirectory();
303 MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
304 w.setInfoStream(VERBOSE ? System.out : null);
305 Document doc = new Document();
306 doc.add(newField("field", "a field", Field.Store.YES,
307 Field.Index.ANALYZED));
// The injected failure must actually be hit.
312 fail("did not hit exception");
313 } catch (RuntimeException re) {
// With maxBufferedDocs=2, adds a doc that crashes during analysis right
// when the next add would trigger a flush; the CrashingFilter's
// IOException must propagate and the writer must stay usable.
321 public void testExceptionJustBeforeFlush() throws IOException {
322 Directory dir = newDirectory();
323 MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
324 w.setInfoStream(VERBOSE ? System.out : null);
325 Document doc = new Document();
326 doc.add(newField("field", "a field", Field.Store.YES,
327 Field.Index.ANALYZED));
// Analyzer whose stream blows up after 4 tokens (see CrashingFilter).
330 Analyzer analyzer = new Analyzer() {
332 public TokenStream tokenStream(String fieldName, Reader reader) {
333 MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
334 tokenizer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
335 return new CrashingFilter(fieldName, tokenizer);
339 Document crashDoc = new Document();
340 crashDoc.add(newField("crash", "do it on token 4", Field.Store.YES,
341 Field.Index.ANALYZED));
343 w.addDocument(crashDoc, analyzer);
344 fail("did not hit expected exception");
345 } catch (IOException ioe) {
// Writer that deterministically fails (RuntimeException) at the
// "startMergeInit" test point when doFail is set — used to test
// exception handling at the very start of a merge.
353 private static final class MockIndexWriter3 extends IndexWriter {
355 public MockIndexWriter3(Directory dir, IndexWriterConfig conf) throws IOException {
363 boolean testPoint(String name) {
364 if (doFail && name.equals("startMergeInit")) {
366 throw new RuntimeException("intentionally failing");
// Forces frequent merges (maxBufferedDocs=2, mergeFactor=2) against
// MockIndexWriter3 so a merge hits the injected startMergeInit failure;
// after syncing the merge scheduler, the writer must be flagged failed.
374 public void testExceptionOnMergeInit() throws IOException {
375 Directory dir = newDirectory();
376 IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
377 .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()).setMergePolicy(newLogMergePolicy());
378 ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
379 MockIndexWriter3 w = new MockIndexWriter3(dir, conf);
381 Document doc = new Document();
382 doc.add(newField("field", "a field", Field.Store.YES,
383 Field.Index.ANALYZED));
384 for(int i=0;i<10;i++)
387 } catch (RuntimeException re) {
// Wait for background merges so the injected failure has happened.
391 ((ConcurrentMergeScheduler) w.getConfig().getMergeScheduler()).sync();
392 assertTrue(w.failed);
// An analyzer whose TokenFilter throws IOException mid-stream: the bad
// document must be aborted (and marked deleted), while subsequent normal
// documents index fine and remain searchable.
398 public void testExceptionFromTokenStream() throws IOException {
399 Directory dir = newDirectory();
400 IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new Analyzer() {
403 public TokenStream tokenStream(String fieldName, Reader reader) {
404 MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
405 tokenizer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
// Anonymous filter that throws after some tokens (trigger condition
// is on an elided line; TODO confirm count threshold in full source).
406 return new TokenFilter(tokenizer) {
407 private int count = 0;
410 public boolean incrementToken() throws IOException {
412 throw new IOException();
414 return input.incrementToken();
// Ensure at least 3 buffered docs so the failed doc and the two good
// ones end up in the same segment before flush.
420 conf.setMaxBufferedDocs(Math.max(3, conf.getMaxBufferedDocs()));
422 IndexWriter writer = new IndexWriter(dir, conf);
424 Document doc = new Document();
425 String contents = "aa bb cc dd ee ff gg hh ii jj kk";
426 doc.add(newField("content", contents, Field.Store.NO,
427 Field.Index.ANALYZED));
429 writer.addDocument(doc);
430 fail("did not hit expected exception");
431 } catch (Exception e) {
434 // Make sure we can add another normal document
435 doc = new Document();
436 doc.add(newField("content", "aa bb cc dd", Field.Store.NO,
437 Field.Index.ANALYZED));
438 writer.addDocument(doc);
440 // Make sure we can add another normal document
441 doc = new Document();
442 doc.add(newField("content", "aa bb cc dd", Field.Store.NO,
443 Field.Index.ANALYZED));
444 writer.addDocument(doc);
// "aa" appears in all 3 docs (docFreq counts deleted docs here), but
// only the 2 good docs should be live in termDocs.
447 IndexReader reader = IndexReader.open(dir, true);
448 final Term t = new Term("content", "aa");
449 assertEquals(3, reader.docFreq(t));
451 // Make sure the doc that hit the exception was marked
453 TermDocs tdocs = reader.termDocs(t);
455 while(tdocs.next()) {
458 assertEquals(2, count);
// "gg" was past the crash point, so it must never have been indexed.
460 assertEquals(reader.docFreq(new Term("content", "gg")), 0);
// MockDirectoryWrapper failure hook that throws an IOException during a
// flush — but only once the stack shows FreqProxTermsWriter.appendPostings
// under doFlush, and only after 30 such calls, so the first flushes succeed.
465 private static class FailOnlyOnFlush extends MockDirectoryWrapper.Failure {
466 boolean doFail = false;
470 public void setDoFail() {
474 public void clearDoFail() {
479 public void eval(MockDirectoryWrapper dir) throws IOException {
// Inspect the current stack to decide whether we're inside a flush.
481 StackTraceElement[] trace = new Exception().getStackTrace();
482 boolean sawAppend = false;
483 boolean sawFlush = false;
484 for (int i = 0; i < trace.length; i++) {
485 if ("org.apache.lucene.index.FreqProxTermsWriter".equals(trace[i].getClassName()) && "appendPostings".equals(trace[i].getMethodName()))
487 if ("doFlush".equals(trace[i].getMethodName()))
491 if (sawAppend && sawFlush && count++ >= 30) {
493 throw new IOException("now failing during flush");
499 // LUCENE-1072: make sure an errant exception on flushing
500 // one segment only takes out those docs in that one flush
501 public void testDocumentsWriterAbort() throws IOException {
502 MockDirectoryWrapper dir = newDirectory();
503 FailOnlyOnFlush failure = new FailOnlyOnFlush();
// maxBufferedDocs=2 → a flush every 2 adds; exactly one flush (of 2 docs)
// should hit the injected failure across the 200 adds.
507 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
508 Document doc = new Document();
509 String contents = "aa bb cc dd ee ff gg hh ii jj kk";
510 doc.add(newField("content", contents, Field.Store.NO,
511 Field.Index.ANALYZED));
512 boolean hitError = false;
513 for(int i=0;i<200;i++) {
515 writer.addDocument(doc);
516 } catch (IOException ioe) {
517 // only one flush should fail:
518 assertFalse(hitError);
522 assertTrue(hitError);
// 198 = 200 adds minus the 2 docs lost in the single aborted flush.
524 IndexReader reader = IndexReader.open(dir, true);
525 assertEquals(198, reader.docFreq(new Term("content", "aa")));
// A document that crashes during analysis (CrashingFilter on "crash")
// must be aborted and marked deleted, while its neighbors survive; the
// test then verifies doc counts, term vectors, and that a forceMerge
// expunges the one deleted doc. Runs two cycles (i=0,1); presumably
// i controls commit vs. close behavior on elided lines — TODO confirm.
530 public void testDocumentsWriterExceptions() throws IOException {
531 Analyzer analyzer = new Analyzer() {
533 public TokenStream tokenStream(String fieldName, Reader reader) {
534 MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
535 tokenizer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
536 return new CrashingFilter(fieldName, tokenizer);
540 for(int i=0;i<2;i++) {
542 System.out.println("TEST: cycle i=" + i);
544 MockDirectoryWrapper dir = newDirectory();
545 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy()));
546 writer.setInfoStream(VERBOSE ? System.out : null);
548 // don't allow a sudden merge to clean up the deleted
550 LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
551 lmp.setMergeFactor(Math.max(lmp.getMergeFactor(), 5));
553 Document doc = new Document();
554 doc.add(newField("contents", "here are some contents", Field.Store.YES,
555 Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
556 writer.addDocument(doc);
557 writer.addDocument(doc);
// Adding the "crash" field makes the NEXT addDocument fail mid-analysis.
558 doc.add(newField("crash", "this should crash after 4 terms", Field.Store.YES,
559 Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
560 doc.add(newField("other", "this will not get indexed", Field.Store.YES,
561 Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
563 writer.addDocument(doc);
564 fail("did not hit expected exception");
565 } catch (IOException ioe) {
567 System.out.println("TEST: hit expected exception");
568 ioe.printStackTrace(System.out);
// Writer must still accept normal documents after the aborted doc.
573 doc = new Document();
574 doc.add(newField("contents", "here are some contents", Field.Store.YES,
575 Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
576 writer.addDocument(doc);
577 writer.addDocument(doc);
582 System.out.println("TEST: open reader");
584 IndexReader reader = IndexReader.open(dir, true);
587 assertEquals(expected, reader.docFreq(new Term("contents", "here")));
588 assertEquals(expected, reader.maxDoc());
// Walk all docs: count deletions and make sure term vectors are readable.
590 for(int j=0;j<reader.maxDoc();j++) {
591 if (reader.isDeleted(j))
595 reader.getTermFreqVectors(j);
// Exactly the one crashed doc should be deleted.
598 assertEquals(1, numDel);
602 writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
603 analyzer).setMaxBufferedDocs(10));
604 doc = new Document();
605 doc.add(newField("contents", "here are some contents", Field.Store.YES,
606 Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
607 for(int j=0;j<17;j++)
608 writer.addDocument(doc);
// forceMerge(1) must expunge the deleted doc.
609 writer.forceMerge(1);
612 reader = IndexReader.open(dir, true);
613 int expected = 19+(1-i)*2;
614 assertEquals(expected, reader.docFreq(new Term("contents", "here")));
615 assertEquals(expected, reader.maxDoc());
617 for(int j=0;j<reader.maxDoc();j++) {
618 if (reader.isDeleted(j))
622 reader.getTermFreqVectors(j);
626 assertEquals(0, numDel);
// Multi-threaded variant of testDocumentsWriterExceptions: NUM_THREAD
// threads each run NUM_ITER iterations of (2 good adds, 1 crashing add,
// 2 good adds); afterwards exactly NUM_THREAD*NUM_ITER docs are deleted,
// and forceMerge expunges them all.
632 public void testDocumentsWriterExceptionThreads() throws Exception {
633 Analyzer analyzer = new Analyzer() {
635 public TokenStream tokenStream(String fieldName, Reader reader) {
636 MockTokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
637 tokenizer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
638 return new CrashingFilter(fieldName, tokenizer);
642 final int NUM_THREAD = 3;
643 final int NUM_ITER = 100;
// Two cycles; presumably i toggles commit vs. close on elided lines —
// TODO confirm against full source.
645 for(int i=0;i<2;i++) {
646 MockDirectoryWrapper dir = newDirectory();
// maxBufferedDocs=-1 disables doc-count flush triggers.
649 final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(-1)
650 .setMergePolicy(newLogMergePolicy(10)));
651 final int finalI = i;
653 Thread[] threads = new Thread[NUM_THREAD];
654 for(int t=0;t<NUM_THREAD;t++) {
655 threads[t] = new Thread() {
659 for(int iter=0;iter<NUM_ITER;iter++) {
660 Document doc = new Document();
661 doc.add(newField("contents", "here are some contents", Field.Store.YES,
662 Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
663 writer.addDocument(doc);
664 writer.addDocument(doc);
// The "crash" field makes the next addDocument abort mid-analysis.
665 doc.add(newField("crash", "this should crash after 4 terms", Field.Store.YES,
666 Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
667 doc.add(newField("other", "this will not get indexed", Field.Store.YES,
668 Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
670 writer.addDocument(doc);
671 fail("did not hit expected exception");
672 } catch (IOException ioe) {
// Writer must still accept normal documents after the aborted doc.
676 doc = new Document();
677 doc.add(newField("contents", "here are some contents", Field.Store.YES,
678 Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
679 writer.addDocument(doc);
680 writer.addDocument(doc);
683 } catch (Throwable t) {
685 System.out.println(Thread.currentThread().getName() + ": ERROR: hit unexpected exception");
686 t.printStackTrace(System.out);
695 for(int t=0;t<NUM_THREAD;t++)
701 IndexReader reader = IndexReader.open(dir, true);
702 int expected = (3+(1-i)*2)*NUM_THREAD*NUM_ITER;
703 assertEquals("i=" + i, expected, reader.docFreq(new Term("contents", "here")));
704 assertEquals(expected, reader.maxDoc());
// Count deletions and verify term vectors are readable for all docs.
706 for(int j=0;j<reader.maxDoc();j++) {
707 if (reader.isDeleted(j))
711 reader.getTermFreqVectors(j);
716 assertEquals(NUM_THREAD*NUM_ITER, numDel);
718 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
719 TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(10));
720 Document doc = new Document();
721 doc.add(newField("contents", "here are some contents", Field.Store.YES,
722 Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
723 for(int j=0;j<17;j++)
724 writer.addDocument(doc);
// forceMerge(1) must expunge every deleted doc.
725 writer.forceMerge(1);
728 reader = IndexReader.open(dir, true);
729 expected += 17-NUM_THREAD*NUM_ITER;
730 assertEquals(expected, reader.docFreq(new Term("contents", "here")));
731 assertEquals(expected, reader.maxDoc());
733 for(int j=0;j<reader.maxDoc();j++) {
734 if (reader.isDeleted(j))
738 reader.getTermFreqVectors(j);
747 // Throws IOException during MockDirectoryWrapper.sync
748 private static class FailOnlyInSync extends MockDirectoryWrapper.Failure {
751 public void eval(MockDirectoryWrapper dir) throws IOException {
// Only fail when the current stack shows we are inside
// MockDirectoryWrapper.sync and doFail is set.
753 StackTraceElement[] trace = new Exception().getStackTrace();
754 for (int i = 0; i < trace.length; i++) {
755 if (doFail && "org.apache.lucene.store.MockDirectoryWrapper".equals(trace[i].getClassName()) && "sync".equals(trace[i].getMethodName())) {
757 throw new IOException("now failing on purpose during sync");
764 // TODO: these are also in TestIndexWriter... add a simple doc-writing method
765 // like this to LuceneTestCase?
// Adds a single one-field ("content": "aaa") document to the writer.
766 private void addDoc(IndexWriter writer) throws IOException
768 Document doc = new Document();
769 doc.add(newField("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
770 writer.addDocument(doc);
773 // LUCENE-1044: test exception during sync
// Injects IOExceptions during directory sync; after the failure fires,
// clearing it and closing must leave all 23 docs intact and readable.
774 public void testExceptionDuringSync() throws IOException {
775 MockDirectoryWrapper dir = newDirectory();
776 FailOnlyInSync failure = new FailOnlyInSync();
779 IndexWriter writer = new IndexWriter(
781 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
782 setMaxBufferedDocs(2).
783 setMergeScheduler(new ConcurrentMergeScheduler()).
784 setMergePolicy(newLogMergePolicy(5))
787 ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(5);
789 for (int i = 0; i < 23; i++) {
794 } catch (IOException ioe) {
// Wait for background merges, then confirm the injected sync failure fired.
800 ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
801 assertTrue(failure.didFail);
802 failure.clearDoFail();
805 IndexReader reader = IndexReader.open(dir, true);
806 assertEquals(23, reader.numDocs());
// Failure hook for commit: throws a RuntimeException during
// SegmentInfos.prepareCommit ("fail first") and an IOException during
// the subsequent deleteFile cleanup ("fail during delete"); fail1/fail2
// record which injections actually fired.
811 private static class FailOnlyInCommit extends MockDirectoryWrapper.Failure {
813 boolean fail1, fail2;
816 public void eval(MockDirectoryWrapper dir) throws IOException {
817 StackTraceElement[] trace = new Exception().getStackTrace();
818 boolean isCommit = false;
819 boolean isDelete = false;
// Classify the current call site from the stack trace.
820 for (int i = 0; i < trace.length; i++) {
821 if ("org.apache.lucene.index.SegmentInfos".equals(trace[i].getClassName()) && "prepareCommit".equals(trace[i].getMethodName()))
823 if ("org.apache.lucene.store.MockDirectoryWrapper".equals(trace[i].getClassName()) && "deleteFile".equals(trace[i].getMethodName()))
830 throw new RuntimeException("now fail first");
833 throw new IOException("now fail during delete");
// A failure during commit must surface as the original RuntimeException
// (not be masked as an IOException by the cleanup failure), and both
// injected failure points must have fired.
840 public void testExceptionsDuringCommit() throws Throwable {
841 MockDirectoryWrapper dir = newDirectory();
842 dir.setFailOnCreateOutput(false);
843 FailOnlyInCommit failure = new FailOnlyInCommit();
844 IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
845 Document doc = new Document();
846 doc.add(newField("field", "a field", Field.Store.YES,
847 Field.Index.ANALYZED));
853 } catch (IOException ioe) {
854 fail("expected only RuntimeException");
855 } catch (RuntimeException re) {
858 assertTrue(failure.fail1 && failure.fail2);
// Repeatedly copies a seed index into a directory with a 50% random
// IOException rate and runs forceMerge; any IOException must carry a
// root cause, and the index must stay intact once the rate is reset.
863 public void testForceMergeExceptions() throws IOException {
864 Directory startDir = newDirectory();
865 IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
866 ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(100);
867 IndexWriter w = new IndexWriter(startDir, conf);
868 for(int i=0;i<27;i++)
872 int iter = TEST_NIGHTLY ? 200 : 20;
873 for(int i=0;i<iter;i++) {
875 System.out.println("TEST: iter " + i);
// Fresh copy of the seed index for every iteration.
877 MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
878 conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new ConcurrentMergeScheduler());
879 ((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions();
880 w = new IndexWriter(dir, conf);
881 w.setInfoStream(VERBOSE ? System.out : null);
882 dir.setRandomIOExceptionRate(0.5);
885 } catch (IOException ioe) {
886 if (ioe.getCause() == null)
887 fail("forceMerge threw IOException without root cause");
// Stop injecting so the writer/reader can close cleanly.
889 dir.setRandomIOExceptionRate(0);
// LUCENE regression: an OutOfMemoryError thrown during the "now flush at
// close" message must propagate out of close() (and not later cause an
// IllegalStateException); the fake OOME fires only once (thrown.size()).
897 public void testOutOfMemoryErrorCausesCloseToFail() throws Exception {
899 final List<Throwable> thrown = new ArrayList<Throwable>();
900 final Directory dir = newDirectory();
// Anonymous writer overriding message() to inject the OOME.
901 final IndexWriter writer = new IndexWriter(dir,
902 newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))) {
904 public void message(final String message) {
905 if (message.startsWith("now flush at close") && 0 == thrown.size()) {
907 throw new OutOfMemoryError("fake OOME at " + message);
912 // need to set an info stream so message is called
913 writer.setInfoStream(new PrintStream(new ByteArrayOutputStream()));
916 fail("OutOfMemoryError expected");
918 catch (final OutOfMemoryError expected) {}
920 // throws IllegalStateEx w/o bug fix
// Writer that deterministically fails (RuntimeException) at the
// "rollback before checkpoint" test point when doFail is set.
926 private static final class MockIndexWriter4 extends IndexWriter {
928 public MockIndexWriter4(Directory dir, IndexWriterConfig conf) throws IOException {
935 boolean testPoint(String name) {
936 if (doFail && name.equals("rollback before checkpoint"))
937 throw new RuntimeException("intentionally failing")&#59;
// An exception thrown during rollback (via MockIndexWriter4's test
// point) must not hang the writer.
943 public void testRollbackExceptionHang() throws Throwable {
944 Directory dir = newDirectory();
945 MockIndexWriter4 w = new MockIndexWriter4(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
951 fail("did not hit intentional RuntimeException");
952 } catch (RuntimeException re) {
961 // LUCENE-1044: Simulate checksum error in segments_N
// Writes a next-generation segments file that is a corrupted copy of the
// current one (last byte flipped); opening the index must fall back to
// the valid previous segments_N instead of failing.
962 public void testSegmentsChecksumError() throws IOException {
963 Directory dir = newDirectory();
965 IndexWriter writer = null;
967 writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
970 for (int i = 0; i < 100; i++) {
977 long gen = SegmentInfos.getCurrentSegmentGeneration(dir);
978 assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
// Copy all but the last byte, then write a corrupted final byte.
980 final String segmentsFileName = SegmentInfos.getCurrentSegmentFileName(dir);
981 IndexInput in = dir.openInput(segmentsFileName);
982 IndexOutput out = dir.createOutput(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1+gen));
983 out.copyBytes(in, in.length()-1);
984 byte b = in.readByte();
985 out.writeByte((byte) (1+b));
989 IndexReader reader = null;
991 reader = IndexReader.open(dir, true);
992 } catch (IOException e) {
993 e.printStackTrace(System.out);
994 fail("segmentInfos failed to retry fallback to correct segments_N file");
1000 // Simulate a corrupt index by removing last byte of
1001 // latest segments file and make sure we get an
1002 // IOException trying to open the index:
1003 public void testSimulatedCorruptIndex1() throws IOException {
1004 MockDirectoryWrapper dir = newDirectory();
1005 dir.setCheckIndexOnClose(false); // we are corrupting it!
1007 IndexWriter writer = null;
1009 writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1011 // add 100 documents
1012 for (int i = 0; i < 100; i++) {
1019 long gen = SegmentInfos.getCurrentSegmentGeneration(dir);
1020 assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
// Copy the segments file minus its last byte, then delete the original
// so the truncated copy is the only segments_N available.
1022 String fileNameIn = SegmentInfos.getCurrentSegmentFileName(dir);
1023 String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
1026 IndexInput in = dir.openInput(fileNameIn);
1027 IndexOutput out = dir.createOutput(fileNameOut);
1028 long length = in.length();
1029 for(int i=0;i<length-1;i++) {
1030 out.writeByte(in.readByte());
1034 dir.deleteFile(fileNameIn);
1036 IndexReader reader = null;
1038 reader = IndexReader.open(dir, true);
1039 fail("reader did not hit IOException on opening a corrupt index");
1040 } catch (Exception e) {
1042 if (reader != null) {
1048 // Simulate a corrupt index by removing one of the cfs
1049 // files and make sure we get an IOException trying to
1051 public void testSimulatedCorruptIndex2() throws IOException {
1052 MockDirectoryWrapper dir = newDirectory();
1053 dir.setCheckIndexOnClose(false); // we are corrupting it!
1054 IndexWriter writer = null;
1056 writer = new IndexWriter(
1058 newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
1059 setMergePolicy(newLogMergePolicy(true))
// Force compound-file format so .cfs files exist to delete.
1061 ((LogMergePolicy) writer.getConfig().getMergePolicy()).setNoCFSRatio(1.0);
1063 // add 100 documents
1064 for (int i = 0; i < 100; i++) {
1071 long gen = SegmentInfos.getCurrentSegmentGeneration(dir);
1072 assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
// Delete the compound file(s) to corrupt the index.
1074 String[] files = dir.listAll();
1075 boolean corrupted = false;
1076 for(int i=0;i<files.length;i++) {
1077 if (files[i].endsWith(".cfs")) {
1078 dir.deleteFile(files[i]);
1083 assertTrue("failed to find cfs file to remove", corrupted);
1085 IndexReader reader = null;
1087 reader = IndexReader.open(dir, true);
1088 fail("reader did not hit IOException on opening a corrupt index");
1089 } catch (Exception e) {
1091 if (reader != null) {
1097 // Simulate a writer that crashed while writing segments
1098 // file: make sure we can still open the index (ie,
1099 // gracefully fallback to the previous segments file),
1100 // and that we can add to the index:
1101 public void testSimulatedCrashedWriter() throws IOException {
1102 MockDirectoryWrapper dir = newDirectory();
// Allow re-creating the same file name, as a crashed writer would.
1103 dir.setPreventDoubleWrite(false);
1105 IndexWriter writer = null;
1107 writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
1109 // add 100 documents
1110 for (int i = 0; i < 100; i++) {
1117 long gen = SegmentInfos.getCurrentSegmentGeneration(dir);
1118 assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
1120 // Make the next segments file, with last byte
1121 // missing, to simulate a writer that crashed while
1122 // writing segments file:
1123 String fileNameIn = SegmentInfos.getCurrentSegmentFileName(dir);
1124 String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
1127 IndexInput in = dir.openInput(fileNameIn);
1128 IndexOutput out = dir.createOutput(fileNameOut);
1129 long length = in.length();
1130 for(int i=0;i<length-1;i++) {
1131 out.writeByte(in.readByte());
// The previous (valid) segments file still exists, so opening must succeed.
1136 IndexReader reader = null;
1138 reader = IndexReader.open(dir, true);
1139 } catch (Exception e) {
1140 fail("reader failed to open on a crashed index");
// Re-creating the index over the crashed one must also work.
1145 writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
1146 } catch (Exception e) {
1147 e.printStackTrace(System.out);
1148 fail("writer failed to open on a crashed index");
1151 // add 100 documents
1152 for (int i = 0; i < 100; i++) {
// addDocuments() with a batch whose 5th doc crashes during analysis:
// the whole batch must be rolled back (no "silly content" phrase hits),
// while docs added before and after the failed batch remain searchable.
1161 public void testAddDocsNonAbortingException() throws Exception {
1162 final Directory dir = newDirectory();
1163 final RandomIndexWriter w = new RandomIndexWriter(random, dir);
1164 final int numDocs1 = random.nextInt(25);
1165 for(int docCount=0;docCount<numDocs1;docCount++) {
1166 Document doc = new Document();
1167 doc.add(newField("content", "good content", Field.Index.ANALYZED));
// Build a 7-doc batch whose doc #4 carries a crashing token stream.
1171 final List<Document> docs = new ArrayList<Document>();
1172 for(int docCount=0;docCount<7;docCount++) {
1173 Document doc = new Document();
1175 doc.add(newField("id", docCount+"", Field.Index.NOT_ANALYZED));
1176 doc.add(newField("content", "silly content " + docCount, Field.Index.ANALYZED));
1177 if (docCount == 4) {
1178 Field f = newField("crash", "", Field.Index.ANALYZED);
1180 MockTokenizer tokenizer = new MockTokenizer(new StringReader("crash me on the 4th token"), MockTokenizer.WHITESPACE, false);
1181 tokenizer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
1182 f.setTokenStream(new CrashingFilter("crash", tokenizer));
1186 w.addDocuments(docs);
1187 // BUG: CrashingFilter didn't
1188 fail("did not hit expected exception");
1189 } catch (IOException ioe) {
// Must be the CrashingFilter's exception, not some other IO failure.
1191 assertEquals(CRASH_FAIL_MESSAGE, ioe.getMessage());
1194 final int numDocs2 = random.nextInt(25);
1195 for(int docCount=0;docCount<numDocs2;docCount++) {
1196 Document doc = new Document();
1197 doc.add(newField("content", "good content", Field.Index.ANALYZED));
1201 final IndexReader r = w.getReader();
// No doc from the aborted batch may match; all good docs must.
1204 final IndexSearcher s = new IndexSearcher(r);
1205 PhraseQuery pq = new PhraseQuery();
1206 pq.add(new Term("content", "silly"));
1207 pq.add(new Term("content", "content"));
1208 assertEquals(0, s.search(pq, 1).totalHits);
1210 pq = new PhraseQuery();
1211 pq.add(new Term("content", "good"));
1212 pq.add(new Term("content", "content"));
1213 assertEquals(numDocs1+numDocs2, s.search(pq, 1).totalHits);
1219 public void testUpdateDocsNonAbortingException() throws Exception {
1220 final Directory dir = newDirectory();
1221 final RandomIndexWriter w = new RandomIndexWriter(random, dir);
1222 final int numDocs1 = random.nextInt(25);
1223 for(int docCount=0;docCount<numDocs1;docCount++) {
1224 Document doc = new Document();
1225 doc.add(newField("content", "good content", Field.Index.ANALYZED));
1229 // Use addDocs (no exception) to get docs in the index:
1230 final List<Document> docs = new ArrayList<Document>();
1231 final int numDocs2 = random.nextInt(25);
1232 for(int docCount=0;docCount<numDocs2;docCount++) {
1233 Document doc = new Document();
1235 doc.add(newField("subid", "subs", Field.Index.NOT_ANALYZED));
1236 doc.add(newField("id", docCount+"", Field.Index.NOT_ANALYZED));
1237 doc.add(newField("content", "silly content " + docCount, Field.Index.ANALYZED));
1239 w.addDocuments(docs);
1241 final int numDocs3 = random.nextInt(25);
1242 for(int docCount=0;docCount<numDocs3;docCount++) {
1243 Document doc = new Document();
1244 doc.add(newField("content", "good content", Field.Index.ANALYZED));
1249 final int limit = _TestUtil.nextInt(random, 2, 25);
1250 final int crashAt = random.nextInt(limit);
1251 for(int docCount=0;docCount<limit;docCount++) {
1252 Document doc = new Document();
1254 doc.add(newField("id", docCount+"", Field.Index.NOT_ANALYZED));
1255 doc.add(newField("content", "silly content " + docCount, Field.Index.ANALYZED));
1256 if (docCount == crashAt) {
1257 Field f = newField("crash", "", Field.Index.ANALYZED);
1259 MockTokenizer tokenizer = new MockTokenizer(new StringReader("crash me on the 4th token"), MockTokenizer.WHITESPACE, false);
1260 tokenizer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
1261 f.setTokenStream(new CrashingFilter("crash", tokenizer));
1266 w.updateDocuments(new Term("subid", "subs"), docs);
1267 // BUG: CrashingFilter didn't
1268 fail("did not hit expected exception");
1269 } catch (IOException ioe) {
1271 assertEquals(CRASH_FAIL_MESSAGE, ioe.getMessage());
1274 final int numDocs4 = random.nextInt(25);
1275 for(int docCount=0;docCount<numDocs4;docCount++) {
1276 Document doc = new Document();
1277 doc.add(newField("content", "good content", Field.Index.ANALYZED));
1281 final IndexReader r = w.getReader();
1284 final IndexSearcher s = new IndexSearcher(r);
1285 PhraseQuery pq = new PhraseQuery();
1286 pq.add(new Term("content", "silly"));
1287 pq.add(new Term("content", "content"));
1288 assertEquals(numDocs2, s.search(pq, 1).totalHits);
1290 pq = new PhraseQuery();
1291 pq.add(new Term("content", "good"));
1292 pq.add(new Term("content", "content"));
1293 assertEquals(numDocs1+numDocs3+numDocs4, s.search(pq, 1).totalHits);