pylucene 3.5.0-3
diff --git a/lucene-java-3.5.0/lucene/backwards/src/test/org/apache/lucene/index/TestThreadedOptimize.java b/lucene-java-3.5.0/lucene/backwards/src/test/org/apache/lucene/index/TestThreadedOptimize.java
new file mode 100644 (file)
index 0000000..64400db
--- /dev/null
@@ -0,0 +1,136 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.util.English;
+
+import org.apache.lucene.util.LuceneTestCase;
+
+import java.util.Random;
+
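+/**
+ * Verifies that optimize() can run concurrently with document additions and
+ * deletions from multiple threads without losing documents or leaving the
+ * index in an inconsistent state.
+ */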
+public class TestThreadedOptimize extends LuceneTestCase {
+  
+  private static final Analyzer ANALYZER = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
+
+  private final static int NUM_THREADS = 3;
+  //private final static int NUM_THREADS = 5;
+
+  private final static int NUM_ITER = 1;
+
+  private final static int NUM_ITER2 = 1;
+
+  private volatile boolean failed;
+
+  private void setFailed() {
+    failed = true;
+  }
+
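+  /**
+   * Seeds a fresh index, then runs NUM_THREADS threads that interleave
+   * optimize() with adds and deletes, and finally checks the resulting
+   * document counts through both the writer and a newly opened reader.
+   */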
+  public void runTest(Random random, Directory directory) throws Exception {
+
+    IndexWriter writer = new IndexWriter(
+        directory,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER).
+            setOpenMode(OpenMode.CREATE).
+            setMaxBufferedDocs(2).
+            setMergePolicy(newLogMergePolicy())
+    );
+
+    for(int iter=0;iter<NUM_ITER;iter++) {
+      final int iterFinal = iter;
+
+      setMergeFactor(writer.getConfig().getMergePolicy(), 100);
+
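+      // Seed the index with 200 documents before the threads start.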
+      for(int i=0;i<200;i++) {
+        Document d = new Document();
+        d.add(newField("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
+        d.add(newField("contents", English.intToEnglish(i), Field.Store.NO, Field.Index.ANALYZED_NO_NORMS));
+        writer.addDocument(d);
+      }
+
+      ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(4);
+      writer.setInfoStream(VERBOSE ? System.out : null);
+
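+      // Each thread i interleaves optimize() with adding 17*(1+i) documents
+      // and then deleting 9*(1+i) of them per inner iteration, for a net
+      // gain of 8*(1+i) documents per iteration.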
+      Thread[] threads = new Thread[NUM_THREADS];
+      
+      for(int i=0;i<NUM_THREADS;i++) {
+        final int iFinal = i;
+        final IndexWriter writerFinal = writer;
+        threads[i] = new Thread() {
+          @Override
+          public void run() {
+            try {
+              for(int j=0;j<NUM_ITER2;j++) {
+                writerFinal.optimize(false);
+                for(int k=0;k<17*(1+iFinal);k++) {
+                  Document d = new Document();
+                  d.add(newField("id", iterFinal + "_" + iFinal + "_" + j + "_" + k, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
+                  d.add(newField("contents", English.intToEnglish(iFinal+k), Field.Store.NO, Field.Index.ANALYZED_NO_NORMS));
+                  writerFinal.addDocument(d);
+                }
+                for(int k=0;k<9*(1+iFinal);k++)
+                  writerFinal.deleteDocuments(new Term("id", iterFinal + "_" + iFinal + "_" + j + "_" + k));
+                writerFinal.optimize();
+              }
+            } catch (Throwable t) {
+              setFailed();
+              System.out.println(Thread.currentThread().getName() + ": hit exception");
+              t.printStackTrace(System.out);
+            }
+          }
+        };
+      }
+
+      for(int i=0;i<NUM_THREADS;i++)
+        threads[i].start();
+
+      for(int i=0;i<NUM_THREADS;i++)
+        threads[i].join();
+
+      assertFalse(failed);
+
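+      // Per outer iteration: 200 seed docs plus a net of 8*(1+i) docs per
+      // thread i and inner iteration, which sums to
+      // 8*NUM_ITER2*NUM_THREADS*(NUM_THREADS+1)/2; documents from earlier
+      // iterations remain in the index, hence the (1+iter) factor.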
+      final int expectedDocCount = (int) ((1+iter)*(200+8*NUM_ITER2*(NUM_THREADS/2.0)*(1+NUM_THREADS)));
+
+      assertEquals("index=" + writer.segString() + " numDocs=" + writer.numDocs() + " maxDoc=" + writer.maxDoc() + " config=" + writer.getConfig(), expectedDocCount, writer.numDocs());
+      assertEquals("index=" + writer.segString() + " numDocs=" + writer.numDocs() + " maxDoc=" + writer.maxDoc() + " config=" + writer.getConfig(), expectedDocCount, writer.maxDoc());
+
+      writer.close();
+      writer = new IndexWriter(directory, newIndexWriterConfig(
+          TEST_VERSION_CURRENT, ANALYZER).setOpenMode(
+          OpenMode.APPEND).setMaxBufferedDocs(2));
+      
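+      // A freshly opened reader must see a fully optimized (single-segment)
+      // index containing the expected number of documents.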
+      IndexReader reader = IndexReader.open(directory, true);
+      assertTrue("reader=" + reader, reader.isOptimized());
+      assertEquals(expectedDocCount, reader.numDocs());
+      reader.close();
+    }
+    writer.close();
+  }
+
+  public void testThreadedOptimize() throws Exception {
+    Directory directory = newDirectory();
+    runTest(random, directory);
+    directory.close();
+  }
+}