X-Git-Url: https://git.mdrn.pl/pylucene.git/blobdiff_plain/a2e61f0c04805cfcb8706176758d1283c7e3a55c..aaeed5504b982cf3545252ab528713250aa33eed:/lucene-java-3.4.0/lucene/src/test/org/apache/lucene/index/TestLongPostings.java

diff --git a/lucene-java-3.4.0/lucene/src/test/org/apache/lucene/index/TestLongPostings.java b/lucene-java-3.4.0/lucene/src/test/org/apache/lucene/index/TestLongPostings.java
deleted file mode 100644
index ee68c9f..0000000
--- a/lucene-java-3.4.0/lucene/src/test/org/apache/lucene/index/TestLongPostings.java
+++ /dev/null
@@ -1,456 +0,0 @@
-package org.apache.lucene.index;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-import java.io.StringReader;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.tokenattributes.TermAttribute;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.FixedBitSet;
-import org.apache.lucene.util._TestUtil;
-
-public class TestLongPostings extends LuceneTestCase {
-
-  // Produces a realistic unicode random string that
-  // survives MockAnalyzer unchanged:
-  private String getRandomTerm(String other) throws IOException {
-    Analyzer a = new MockAnalyzer(random);
-    while(true) {
-      String s = _TestUtil.randomRealisticUnicodeString(random);
-      if (other != null && s.equals(other)) {
-        continue;
-      }
-      final TokenStream ts = a.tokenStream("foo", new StringReader(s));
-      final TermAttribute termAtt = ts.getAttribute(TermAttribute.class);
-      int count = 0;
-      ts.reset();
-      while(ts.incrementToken()) {
-        if (count == 0 && !termAtt.term().equals(s)) {
-          break;
-        }
-        count++;
-      }
-      if (count == 1) {
-        return s;
-      }
-    }
-  }
-
-  public void testLongPostings() throws Exception {
-    // Don't use _TestUtil.getTempDir so that we own the
-    // randomness (ie same seed will point to same dir):
-    Directory dir = newFSDirectory(_TestUtil.getTempDir("longpostings" + "." + random.nextLong()));
-
-    final int NUM_DOCS = atLeast(2000);
-
-    if (VERBOSE) {
-      System.out.println("TEST: NUM_DOCS=" + NUM_DOCS);
-    }
-
-    final String s1 = getRandomTerm(null);
-    final String s2 = getRandomTerm(s1);
-
-    if (VERBOSE) {
-      System.out.println("\nTEST: s1=" + s1 + " s2=" + s2);
-      /*
-      for(int idx=0;idx 0);
-    assertTrue(r.docFreq(new Term("field", s2)) > 0);
-
-    final byte[] payload = new byte[100];
-
-    int num = atLeast(1000);
-    for(int iter=0;iter 0);
-    assertTrue(r.docFreq(new Term("field", s2)) > 0);
-
-    final byte[] payload = new byte[100];
-
-    int num = atLeast(1000);
-    for(int iter=0;iter