1 package org.apache.lucene.queryParser;
4 * Licensed to the Apache Software Foundation (ASF) under one or more
5 * contributor license agreements. See the NOTICE file distributed with
6 * this work for additional information regarding copyright ownership.
7 * The ASF licenses this file to You under the Apache License, Version 2.0
8 * (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
20 import java.io.IOException;
21 import java.io.Reader;
22 import java.text.Collator;
23 import java.text.DateFormat;
24 import java.util.Calendar;
25 import java.util.Date;
26 import java.util.GregorianCalendar;
27 import java.util.HashSet;
28 import java.util.Locale;
31 import org.apache.lucene.analysis.Analyzer;
32 import org.apache.lucene.analysis.KeywordAnalyzer;
33 import org.apache.lucene.analysis.LowerCaseTokenizer;
34 import org.apache.lucene.analysis.MockAnalyzer;
35 import org.apache.lucene.analysis.MockTokenizer;
36 import org.apache.lucene.analysis.StopAnalyzer;
37 import org.apache.lucene.analysis.StopFilter;
38 import org.apache.lucene.analysis.TokenFilter;
39 import org.apache.lucene.analysis.TokenStream;
40 import org.apache.lucene.analysis.WhitespaceAnalyzer;
41 import org.apache.lucene.analysis.standard.StandardAnalyzer;
42 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
43 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
44 import org.apache.lucene.document.DateField;
45 import org.apache.lucene.document.DateTools;
46 import org.apache.lucene.document.Document;
47 import org.apache.lucene.document.Field;
48 import org.apache.lucene.index.IndexWriter;
49 import org.apache.lucene.index.Term;
50 import org.apache.lucene.index.IndexReader;
51 import org.apache.lucene.search.BooleanQuery;
52 import org.apache.lucene.search.BooleanClause;
53 import org.apache.lucene.search.MultiTermQuery;
54 import org.apache.lucene.search.FuzzyQuery;
55 import org.apache.lucene.search.IndexSearcher;
56 import org.apache.lucene.search.MatchAllDocsQuery;
57 import org.apache.lucene.search.PhraseQuery;
58 import org.apache.lucene.search.PrefixQuery;
59 import org.apache.lucene.search.Query;
60 import org.apache.lucene.search.TermRangeQuery;
61 import org.apache.lucene.search.ScoreDoc;
62 import org.apache.lucene.search.TermQuery;
63 import org.apache.lucene.search.WildcardQuery;
64 import org.apache.lucene.store.Directory;
65 import org.apache.lucene.util.LuceneTestCase;
68 * Base Test class for QueryParser subclasses
70 // TODO: it would be better to refactor the parts that are specific really
71 // to the core QP and subclass/use the parts that are not in the contrib QP
72 public abstract class QueryParserTestBase extends LuceneTestCase {
74 public static Analyzer qpAnalyzer = new QPTestAnalyzer();
76 public static final class QPTestFilter extends TokenFilter {
77 CharTermAttribute termAtt;
78 OffsetAttribute offsetAtt;
81 * Filter which discards the token 'stop' and which expands the
82 * token 'phrase' into 'phrase1 phrase2'
84 public QPTestFilter(TokenStream in) {
86 termAtt = addAttribute(CharTermAttribute.class);
87 offsetAtt = addAttribute(OffsetAttribute.class);
90 boolean inPhrase = false;
91 int savedStart = 0, savedEnd = 0;
94 public boolean incrementToken() throws IOException {
98 termAtt.append("phrase2");
99 offsetAtt.setOffset(savedStart, savedEnd);
102 while (input.incrementToken()) {
103 if (termAtt.toString().equals("phrase")) {
105 savedStart = offsetAtt.startOffset();
106 savedEnd = offsetAtt.endOffset();
107 termAtt.setEmpty().append("phrase1");
108 offsetAtt.setOffset(savedStart, savedEnd);
110 } else if (!termAtt.toString().equals("stop"))
118 public static final class QPTestAnalyzer extends Analyzer {
120 /** Filters LowerCaseTokenizer with StopFilter. */
122 public final TokenStream tokenStream(String fieldName, Reader reader) {
123 return new QPTestFilter(new LowerCaseTokenizer(TEST_VERSION_CURRENT, reader));
127 public static class QPTestParser extends QueryParser {
128 public QPTestParser(String f, Analyzer a) {
129 super(TEST_VERSION_CURRENT, f, a);
133 protected Query getFuzzyQuery(String field, String termStr, float minSimilarity) throws ParseException {
134 throw new ParseException("Fuzzy queries not allowed");
138 protected Query getWildcardQuery(String field, String termStr) throws ParseException {
139 throw new ParseException("Wildcard queries not allowed");
143 private int originalMaxClauses;
146 public void setUp() throws Exception {
148 originalMaxClauses = BooleanQuery.getMaxClauseCount();
151 public abstract QueryParser getParser(Analyzer a) throws Exception;
153 public Query getQuery(String query, Analyzer a) throws Exception {
154 return getParser(a).parse(query);
157 public void assertQueryEquals(String query, Analyzer a, String result)
159 Query q = getQuery(query, a);
160 String s = q.toString("field");
161 if (!s.equals(result)) {
162 fail("Query /" + query + "/ yielded /" + s
163 + "/, expecting /" + result + "/");
167 public void assertQueryEquals(QueryParser qp, String field, String query, String result)
169 Query q = qp.parse(query);
170 String s = q.toString(field);
171 if (!s.equals(result)) {
172 fail("Query /" + query + "/ yielded /" + s
173 + "/, expecting /" + result + "/");
177 public void assertEscapedQueryEquals(String query, Analyzer a, String result)
179 String escapedQuery = QueryParser.escape(query);
180 if (!escapedQuery.equals(result)) {
181 fail("Query /" + query + "/ yielded /" + escapedQuery
182 + "/, expecting /" + result + "/");
186 public void assertWildcardQueryEquals(String query, boolean lowercase, String result, boolean allowLeadingWildcard)
188 QueryParser qp = getParser(null);
189 qp.setLowercaseExpandedTerms(lowercase);
190 qp.setAllowLeadingWildcard(allowLeadingWildcard);
191 Query q = qp.parse(query);
192 String s = q.toString("field");
193 if (!s.equals(result)) {
194 fail("WildcardQuery /" + query + "/ yielded /" + s
195 + "/, expecting /" + result + "/");
199 public void assertWildcardQueryEquals(String query, boolean lowercase, String result)
201 assertWildcardQueryEquals(query, lowercase, result, false);
204 public void assertWildcardQueryEquals(String query, String result) throws Exception {
205 QueryParser qp = getParser(null);
206 Query q = qp.parse(query);
207 String s = q.toString("field");
208 if (!s.equals(result)) {
209 fail("WildcardQuery /" + query + "/ yielded /" + s + "/, expecting /"
214 public Query getQueryDOA(String query, Analyzer a)
217 a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
218 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", a);
219 qp.setDefaultOperator(QueryParser.AND_OPERATOR);
220 return qp.parse(query);
223 public void assertQueryEqualsDOA(String query, Analyzer a, String result)
225 Query q = getQueryDOA(query, a);
226 String s = q.toString("field");
227 if (!s.equals(result)) {
228 fail("Query /" + query + "/ yielded /" + s
229 + "/, expecting /" + result + "/");
233 public void testCJK() throws Exception {
234 // Test Ideographic Space - As wide as a CJK character cell (fullwidth)
235 // used google to translate the word "term" to japanese -> 用語
236 assertQueryEquals("term\u3000term\u3000term", null, "term\u0020term\u0020term");
237 assertQueryEquals("用語\u3000用語\u3000用語", null, "用語\u0020用語\u0020用語");
240 public void testCJKTerm() throws Exception {
241 // individual CJK chars as terms
242 StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
244 BooleanQuery expected = new BooleanQuery();
245 expected.add(new TermQuery(new Term("field", "ä¸")), BooleanClause.Occur.SHOULD);
246 expected.add(new TermQuery(new Term("field", "国")), BooleanClause.Occur.SHOULD);
248 assertEquals(expected, getQuery("ä¸å›½", analyzer));
251 public void testCJKBoostedTerm() throws Exception {
252 // individual CJK chars as terms
253 StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
255 BooleanQuery expected = new BooleanQuery();
256 expected.setBoost(0.5f);
257 expected.add(new TermQuery(new Term("field", "ä¸")), BooleanClause.Occur.SHOULD);
258 expected.add(new TermQuery(new Term("field", "国")), BooleanClause.Occur.SHOULD);
260 assertEquals(expected, getQuery("ä¸å›½^0.5", analyzer));
263 public void testCJKPhrase() throws Exception {
264 // individual CJK chars as terms
265 StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
267 PhraseQuery expected = new PhraseQuery();
268 expected.add(new Term("field", "ä¸"));
269 expected.add(new Term("field", "国"));
271 assertEquals(expected, getQuery("\"ä¸å›½\"", analyzer));
274 public void testCJKBoostedPhrase() throws Exception {
275 // individual CJK chars as terms
276 StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
278 PhraseQuery expected = new PhraseQuery();
279 expected.setBoost(0.5f);
280 expected.add(new Term("field", "ä¸"));
281 expected.add(new Term("field", "国"));
283 assertEquals(expected, getQuery("\"ä¸å›½\"^0.5", analyzer));
286 public void testCJKSloppyPhrase() throws Exception {
287 // individual CJK chars as terms
288 StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
290 PhraseQuery expected = new PhraseQuery();
292 expected.add(new Term("field", "ä¸"));
293 expected.add(new Term("field", "国"));
295 assertEquals(expected, getQuery("\"ä¸å›½\"~3", analyzer));
298 public void testAutoGeneratePhraseQueriesOn() throws Exception {
299 // individual CJK chars as terms
300 StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
302 PhraseQuery expected = new PhraseQuery();
303 expected.add(new Term("field", "ä¸"));
304 expected.add(new Term("field", "国"));
305 QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "field", analyzer);
306 parser.setAutoGeneratePhraseQueries(true);
307 assertEquals(expected, parser.parse("ä¸å›½"));
310 public void testSimple() throws Exception {
311 assertQueryEquals("term term term", null, "term term term");
312 assertQueryEquals("türm term term", new MockAnalyzer(random), "türm term term");
313 assertQueryEquals("ümlaut", new MockAnalyzer(random), "ümlaut");
315 assertQueryEquals("\"\"", new KeywordAnalyzer(), "");
316 assertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:");
318 assertQueryEquals("a AND b", null, "+a +b");
319 assertQueryEquals("(a AND b)", null, "+a +b");
320 assertQueryEquals("c OR (a AND b)", null, "c (+a +b)");
321 assertQueryEquals("a AND NOT b", null, "+a -b");
322 assertQueryEquals("a AND -b", null, "+a -b");
323 assertQueryEquals("a AND !b", null, "+a -b");
324 assertQueryEquals("a && b", null, "+a +b");
325 assertQueryEquals("a && ! b", null, "+a -b");
327 assertQueryEquals("a OR b", null, "a b");
328 assertQueryEquals("a || b", null, "a b");
329 assertQueryEquals("a OR !b", null, "a -b");
330 assertQueryEquals("a OR ! b", null, "a -b");
331 assertQueryEquals("a OR -b", null, "a -b");
333 assertQueryEquals("+term -term term", null, "+term -term term");
334 assertQueryEquals("foo:term AND field:anotherTerm", null,
335 "+foo:term +anotherterm");
336 assertQueryEquals("term AND \"phrase phrase\"", null,
337 "+term +\"phrase phrase\"");
338 assertQueryEquals("\"hello there\"", null, "\"hello there\"");
339 assertTrue(getQuery("a AND b", null) instanceof BooleanQuery);
340 assertTrue(getQuery("hello", null) instanceof TermQuery);
341 assertTrue(getQuery("\"hello there\"", null) instanceof PhraseQuery);
343 assertQueryEquals("germ term^2.0", null, "germ term^2.0");
344 assertQueryEquals("(term)^2.0", null, "term^2.0");
345 assertQueryEquals("(germ term)^2.0", null, "(germ term)^2.0");
346 assertQueryEquals("term^2.0", null, "term^2.0");
347 assertQueryEquals("term^2", null, "term^2.0");
348 assertQueryEquals("\"germ term\"^2.0", null, "\"germ term\"^2.0");
349 assertQueryEquals("\"term germ\"^2", null, "\"term germ\"^2.0");
351 assertQueryEquals("(foo OR bar) AND (baz OR boo)", null,
352 "+(foo bar) +(baz boo)");
353 assertQueryEquals("((a OR b) AND NOT c) OR d", null,
355 assertQueryEquals("+(apple \"steve jobs\") -(foo bar baz)", null,
356 "+(apple \"steve jobs\") -(foo bar baz)");
357 assertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null,
358 "+(title:dog title:cat) -author:\"bob dole\"");
360 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random));
361 // make sure OR is the default:
362 assertEquals(QueryParser.OR_OPERATOR, qp.getDefaultOperator());
363 qp.setDefaultOperator(QueryParser.AND_OPERATOR);
364 assertEquals(QueryParser.AND_OPERATOR, qp.getDefaultOperator());
365 qp.setDefaultOperator(QueryParser.OR_OPERATOR);
366 assertEquals(QueryParser.OR_OPERATOR, qp.getDefaultOperator());
369 public void testPunct() throws Exception {
370 Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
371 assertQueryEquals("a&b", a, "a&b");
372 assertQueryEquals("a&&b", a, "a&&b");
373 assertQueryEquals(".NET", a, ".NET");
376 public void testSlop() throws Exception {
377 assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
378 assertQueryEquals("\"term germ\"~2 flork", null, "\"term germ\"~2 flork");
379 assertQueryEquals("\"term\"~2", null, "term");
380 assertQueryEquals("\" \"~2 germ", null, "germ");
381 assertQueryEquals("\"term germ\"~2^2", null, "\"term germ\"~2^2.0");
384 public void testNumber() throws Exception {
385 // The numbers go away because SimpleAnalzyer ignores them
386 assertQueryEquals("3", null, "");
387 assertQueryEquals("term 1.0 1 2", null, "term");
388 assertQueryEquals("term term1 term2", null, "term term term");
390 Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, true);
391 assertQueryEquals("3", a, "3");
392 assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
393 assertQueryEquals("term term1 term2", a, "term term1 term2");
396 public void testWildcard() throws Exception {
397 assertQueryEquals("term*", null, "term*");
398 assertQueryEquals("term*^2", null, "term*^2.0");
399 assertQueryEquals("term~", null, "term~0.5");
400 assertQueryEquals("term~0.7", null, "term~0.7");
401 assertQueryEquals("term~^2", null, "term~0.5^2.0");
402 assertQueryEquals("term^2~", null, "term~0.5^2.0");
403 assertQueryEquals("term*germ", null, "term*germ");
404 assertQueryEquals("term*germ^3", null, "term*germ^3.0");
406 assertTrue(getQuery("term*", null) instanceof PrefixQuery);
407 assertTrue(getQuery("term*^2", null) instanceof PrefixQuery);
408 assertTrue(getQuery("term~", null) instanceof FuzzyQuery);
409 assertTrue(getQuery("term~0.7", null) instanceof FuzzyQuery);
410 FuzzyQuery fq = (FuzzyQuery)getQuery("term~0.7", null);
411 assertEquals(0.7f, fq.getMinSimilarity(), 0.1f);
412 assertEquals(FuzzyQuery.defaultPrefixLength, fq.getPrefixLength());
413 fq = (FuzzyQuery)getQuery("term~", null);
414 assertEquals(0.5f, fq.getMinSimilarity(), 0.1f);
415 assertEquals(FuzzyQuery.defaultPrefixLength, fq.getPrefixLength());
417 assertParseException("term~1.1"); // value > 1, throws exception
419 assertTrue(getQuery("term*germ", null) instanceof WildcardQuery);
421 /* Tests to see that wild card terms are (or are not) properly
422 * lower-cased with propery parser configuration
424 // First prefix queries:
425 // by default, convert to lowercase:
426 assertWildcardQueryEquals("Term*", true, "term*");
427 // explicitly set lowercase:
428 assertWildcardQueryEquals("term*", true, "term*");
429 assertWildcardQueryEquals("Term*", true, "term*");
430 assertWildcardQueryEquals("TERM*", true, "term*");
431 // explicitly disable lowercase conversion:
432 assertWildcardQueryEquals("term*", false, "term*");
433 assertWildcardQueryEquals("Term*", false, "Term*");
434 assertWildcardQueryEquals("TERM*", false, "TERM*");
435 // Then 'full' wildcard queries:
436 // by default, convert to lowercase:
437 assertWildcardQueryEquals("Te?m", "te?m");
438 // explicitly set lowercase:
439 assertWildcardQueryEquals("te?m", true, "te?m");
440 assertWildcardQueryEquals("Te?m", true, "te?m");
441 assertWildcardQueryEquals("TE?M", true, "te?m");
442 assertWildcardQueryEquals("Te?m*gerM", true, "te?m*germ");
443 // explicitly disable lowercase conversion:
444 assertWildcardQueryEquals("te?m", false, "te?m");
445 assertWildcardQueryEquals("Te?m", false, "Te?m");
446 assertWildcardQueryEquals("TE?M", false, "TE?M");
447 assertWildcardQueryEquals("Te?m*gerM", false, "Te?m*gerM");
449 assertWildcardQueryEquals("Term~", "term~0.5");
450 assertWildcardQueryEquals("Term~", true, "term~0.5");
451 assertWildcardQueryEquals("Term~", false, "Term~0.5");
453 assertWildcardQueryEquals("[A TO C]", "[a TO c]");
454 assertWildcardQueryEquals("[A TO C]", true, "[a TO c]");
455 assertWildcardQueryEquals("[A TO C]", false, "[A TO C]");
456 // Test suffix queries: first disallow
458 assertWildcardQueryEquals("*Term", true, "*term");
460 } catch(ParseException pe) {
461 // expected exception
464 assertWildcardQueryEquals("?Term", true, "?term");
466 } catch(ParseException pe) {
467 // expected exception
469 // Test suffix queries: then allow
470 assertWildcardQueryEquals("*Term", true, "*term", true);
471 assertWildcardQueryEquals("?Term", true, "?term", true);
474 public void testLeadingWildcardType() throws Exception {
475 QueryParser qp = getParser(null);
476 qp.setAllowLeadingWildcard(true);
477 assertEquals(WildcardQuery.class, qp.parse("t*erm*").getClass());
478 assertEquals(WildcardQuery.class, qp.parse("?term*").getClass());
479 assertEquals(WildcardQuery.class, qp.parse("*term*").getClass());
482 public void testQPA() throws Exception {
483 assertQueryEquals("term term^3.0 term", qpAnalyzer, "term term^3.0 term");
484 assertQueryEquals("term stop^3.0 term", qpAnalyzer, "term term");
486 assertQueryEquals("term term term", qpAnalyzer, "term term term");
487 assertQueryEquals("term +stop term", qpAnalyzer, "term term");
488 assertQueryEquals("term -stop term", qpAnalyzer, "term term");
490 assertQueryEquals("drop AND (stop) AND roll", qpAnalyzer, "+drop +roll");
491 assertQueryEquals("term +(stop) term", qpAnalyzer, "term term");
492 assertQueryEquals("term -(stop) term", qpAnalyzer, "term term");
494 assertQueryEquals("drop AND stop AND roll", qpAnalyzer, "+drop +roll");
495 assertQueryEquals("term phrase term", qpAnalyzer,
496 "term (phrase1 phrase2) term");
497 assertQueryEquals("term AND NOT phrase term", qpAnalyzer,
498 "+term -(phrase1 phrase2) term");
499 assertQueryEquals("stop^3", qpAnalyzer, "");
500 assertQueryEquals("stop", qpAnalyzer, "");
501 assertQueryEquals("(stop)^3", qpAnalyzer, "");
502 assertQueryEquals("((stop))^3", qpAnalyzer, "");
503 assertQueryEquals("(stop^3)", qpAnalyzer, "");
504 assertQueryEquals("((stop)^3)", qpAnalyzer, "");
505 assertQueryEquals("(stop)", qpAnalyzer, "");
506 assertQueryEquals("((stop))", qpAnalyzer, "");
507 assertTrue(getQuery("term term term", qpAnalyzer) instanceof BooleanQuery);
508 assertTrue(getQuery("term +stop", qpAnalyzer) instanceof TermQuery);
511 public void testRange() throws Exception {
512 assertQueryEquals("[ a TO z]", null, "[a TO z]");
513 assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)getQuery("[ a TO z]", null)).getRewriteMethod());
515 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
516 qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
517 assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE,((TermRangeQuery)qp.parse("[ a TO z]")).getRewriteMethod());
519 assertQueryEquals("[ a TO z ]", null, "[a TO z]");
520 assertQueryEquals("{ a TO z}", null, "{a TO z}");
521 assertQueryEquals("{ a TO z }", null, "{a TO z}");
522 assertQueryEquals("{ a TO z }^2.0", null, "{a TO z}^2.0");
523 assertQueryEquals("[ a TO z] OR bar", null, "[a TO z] bar");
524 assertQueryEquals("[ a TO z] AND bar", null, "+[a TO z] +bar");
525 assertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
526 assertQueryEquals("gack ( bar blar { a TO z}) ", null, "gack (bar blar {a TO z})");
529 public void testFarsiRangeCollating() throws Exception {
530 Directory ramDir = newDirectory();
531 IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
532 Document doc = new Document();
533 doc.add(newField("content","\u0633\u0627\u0628",
534 Field.Store.YES, Field.Index.NOT_ANALYZED));
537 IndexSearcher is = new IndexSearcher(ramDir, true);
539 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "content", new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
541 // Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
542 // RuleBasedCollator. However, the Arabic Locale seems to order the Farsi
543 // characters properly.
544 Collator c = Collator.getInstance(new Locale("ar"));
545 qp.setRangeCollator(c);
547 // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
548 // orders the U+0698 character before the U+0633 character, so the single
549 // index Term below should NOT be returned by a ConstantScoreRangeQuery
550 // with a Farsi Collator (or an Arabic one for the case when Farsi is not
553 // Test ConstantScoreRangeQuery
554 qp.setMultiTermRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
555 ScoreDoc[] result = is.search(qp.parse("[ \u062F TO \u0698 ]"), null, 1000).scoreDocs;
556 assertEquals("The index Term should not be included.", 0, result.length);
558 result = is.search(qp.parse("[ \u0633 TO \u0638 ]"), null, 1000).scoreDocs;
559 assertEquals("The index Term should be included.", 1, result.length);
561 // Test TermRangeQuery
562 qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
563 result = is.search(qp.parse("[ \u062F TO \u0698 ]"), null, 1000).scoreDocs;
564 assertEquals("The index Term should not be included.", 0, result.length);
566 result = is.search(qp.parse("[ \u0633 TO \u0638 ]"), null, 1000).scoreDocs;
567 assertEquals("The index Term should be included.", 1, result.length);
573 private String escapeDateString(String s) {
574 if (s.indexOf(" ") > -1) {
575 return "\"" + s + "\"";
581 /** for testing legacy DateField support */
582 private String getLegacyDate(String s) throws Exception {
583 DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT);
584 return DateField.dateToString(df.parse(s));
587 /** for testing DateTools support */
588 private String getDate(String s, DateTools.Resolution resolution) throws Exception {
589 DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT);
590 return getDate(df.parse(s), resolution);
593 /** for testing DateTools support */
594 private String getDate(Date d, DateTools.Resolution resolution) throws Exception {
595 if (resolution == null) {
596 return DateField.dateToString(d);
598 return DateTools.dateToString(d, resolution);
602 private String getLocalizedDate(int year, int month, int day) {
603 DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT);
604 Calendar calendar = new GregorianCalendar();
606 calendar.set(year, month, day);
607 calendar.set(Calendar.HOUR_OF_DAY, 23);
608 calendar.set(Calendar.MINUTE, 59);
609 calendar.set(Calendar.SECOND, 59);
610 calendar.set(Calendar.MILLISECOND, 999);
611 return df.format(calendar.getTime());
614 /** for testing legacy DateField support */
615 public void testLegacyDateRange() throws Exception {
616 String startDate = getLocalizedDate(2002, 1, 1);
617 String endDate = getLocalizedDate(2002, 1, 4);
618 Calendar endDateExpected = new GregorianCalendar();
619 endDateExpected.clear();
620 endDateExpected.set(2002, 1, 4, 23, 59, 59);
621 endDateExpected.set(Calendar.MILLISECOND, 999);
622 assertQueryEquals("[ " + escapeDateString(startDate) + " TO " + escapeDateString(endDate) + "]", null,
623 "[" + getLegacyDate(startDate) + " TO " + DateField.dateToString(endDateExpected.getTime()) + "]");
624 assertQueryEquals("{ " + escapeDateString(startDate) + " " + escapeDateString(endDate) + " }", null,
625 "{" + getLegacyDate(startDate) + " TO " + getLegacyDate(endDate) + "}");
628 public void testDateRange() throws Exception {
629 String startDate = getLocalizedDate(2002, 1, 1);
630 String endDate = getLocalizedDate(2002, 1, 4);
631 Calendar endDateExpected = new GregorianCalendar();
632 endDateExpected.clear();
633 endDateExpected.set(2002, 1, 4, 23, 59, 59);
634 endDateExpected.set(Calendar.MILLISECOND, 999);
635 final String defaultField = "default";
636 final String monthField = "month";
637 final String hourField = "hour";
638 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
640 // Don't set any date resolution and verify if DateField is used
641 assertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
642 endDateExpected.getTime(), null);
644 // set a field specific date resolution
645 qp.setDateResolution(monthField, DateTools.Resolution.MONTH);
647 // DateField should still be used for defaultField
648 assertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
649 endDateExpected.getTime(), null);
651 // set default date resolution to MILLISECOND
652 qp.setDateResolution(DateTools.Resolution.MILLISECOND);
654 // set second field specific date resolution
655 qp.setDateResolution(hourField, DateTools.Resolution.HOUR);
657 // for this field no field specific date resolution has been set,
658 // so verify if the default resolution is used
659 assertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
660 endDateExpected.getTime(), DateTools.Resolution.MILLISECOND);
662 // verify if field specific date resolutions are used for these two fields
663 assertDateRangeQueryEquals(qp, monthField, startDate, endDate,
664 endDateExpected.getTime(), DateTools.Resolution.MONTH);
666 assertDateRangeQueryEquals(qp, hourField, startDate, endDate,
667 endDateExpected.getTime(), DateTools.Resolution.HOUR);
670 public void assertDateRangeQueryEquals(QueryParser qp, String field, String startDate, String endDate,
671 Date endDateInclusive, DateTools.Resolution resolution) throws Exception {
672 assertQueryEquals(qp, field, field + ":[" + escapeDateString(startDate) + " TO " + escapeDateString(endDate) + "]",
673 "[" + getDate(startDate, resolution) + " TO " + getDate(endDateInclusive, resolution) + "]");
674 assertQueryEquals(qp, field, field + ":{" + escapeDateString(startDate) + " TO " + escapeDateString(endDate) + "}",
675 "{" + getDate(startDate, resolution) + " TO " + getDate(endDate, resolution) + "}");
678 public void testEscaped() throws Exception {
679 Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
681 /*assertQueryEquals("\\[brackets", a, "\\[brackets");
682 assertQueryEquals("\\[brackets", null, "brackets");
683 assertQueryEquals("\\\\", a, "\\\\");
684 assertQueryEquals("\\+blah", a, "\\+blah");
685 assertQueryEquals("\\(blah", a, "\\(blah");
687 assertQueryEquals("\\-blah", a, "\\-blah");
688 assertQueryEquals("\\!blah", a, "\\!blah");
689 assertQueryEquals("\\{blah", a, "\\{blah");
690 assertQueryEquals("\\}blah", a, "\\}blah");
691 assertQueryEquals("\\:blah", a, "\\:blah");
692 assertQueryEquals("\\^blah", a, "\\^blah");
693 assertQueryEquals("\\[blah", a, "\\[blah");
694 assertQueryEquals("\\]blah", a, "\\]blah");
695 assertQueryEquals("\\\"blah", a, "\\\"blah");
696 assertQueryEquals("\\(blah", a, "\\(blah");
697 assertQueryEquals("\\)blah", a, "\\)blah");
698 assertQueryEquals("\\~blah", a, "\\~blah");
699 assertQueryEquals("\\*blah", a, "\\*blah");
700 assertQueryEquals("\\?blah", a, "\\?blah");
701 //assertQueryEquals("foo \\&\\& bar", a, "foo \\&\\& bar");
702 //assertQueryEquals("foo \\|| bar", a, "foo \\|| bar");
703 //assertQueryEquals("foo \\AND bar", a, "foo \\AND bar");*/
705 assertQueryEquals("\\a", a, "a");
707 assertQueryEquals("a\\-b:c", a, "a-b:c");
708 assertQueryEquals("a\\+b:c", a, "a+b:c");
709 assertQueryEquals("a\\:b:c", a, "a:b:c");
710 assertQueryEquals("a\\\\b:c", a, "a\\b:c");
712 assertQueryEquals("a:b\\-c", a, "a:b-c");
713 assertQueryEquals("a:b\\+c", a, "a:b+c");
714 assertQueryEquals("a:b\\:c", a, "a:b:c");
715 assertQueryEquals("a:b\\\\c", a, "a:b\\c");
717 assertQueryEquals("a:b\\-c*", a, "a:b-c*");
718 assertQueryEquals("a:b\\+c*", a, "a:b+c*");
719 assertQueryEquals("a:b\\:c*", a, "a:b:c*");
721 assertQueryEquals("a:b\\\\c*", a, "a:b\\c*");
723 assertQueryEquals("a:b\\-?c", a, "a:b-?c");
724 assertQueryEquals("a:b\\+?c", a, "a:b+?c");
725 assertQueryEquals("a:b\\:?c", a, "a:b:?c");
727 assertQueryEquals("a:b\\\\?c", a, "a:b\\?c");
729 assertQueryEquals("a:b\\-c~", a, "a:b-c~0.5");
730 assertQueryEquals("a:b\\+c~", a, "a:b+c~0.5");
731 assertQueryEquals("a:b\\:c~", a, "a:b:c~0.5");
732 assertQueryEquals("a:b\\\\c~", a, "a:b\\c~0.5");
734 assertQueryEquals("[ a\\- TO a\\+ ]", null, "[a- TO a+]");
735 assertQueryEquals("[ a\\: TO a\\~ ]", null, "[a: TO a~]");
736 assertQueryEquals("[ a\\\\ TO a\\* ]", null, "[a\\ TO a*]");
738 assertQueryEquals("[\"c\\:\\\\temp\\\\\\~foo0.txt\" TO \"c\\:\\\\temp\\\\\\~foo9.txt\"]", a,
739 "[c:\\temp\\~foo0.txt TO c:\\temp\\~foo9.txt]");
741 assertQueryEquals("a\\\\\\+b", a, "a\\+b");
743 assertQueryEquals("a \\\"b c\\\" d", a, "a \"b c\" d");
744 assertQueryEquals("\"a \\\"b c\\\" d\"", a, "\"a \"b c\" d\"");
745 assertQueryEquals("\"a \\+b c d\"", a, "\"a +b c d\"");
747 assertQueryEquals("c\\:\\\\temp\\\\\\~foo.txt", a, "c:\\temp\\~foo.txt");
749 assertParseException("XY\\"); // there must be a character after the escape char
751 // test unicode escaping
752 assertQueryEquals("a\\u0062c", a, "abc");
753 assertQueryEquals("XY\\u005a", a, "XYZ");
754 assertQueryEquals("XY\\u005A", a, "XYZ");
755 assertQueryEquals("\"a \\\\\\u0028\\u0062\\\" c\"", a, "\"a \\(b\" c\"");
757 assertParseException("XY\\u005G"); // test non-hex character in escaped unicode sequence
758 assertParseException("XY\\u005"); // test incomplete escaped unicode sequence
760 // Tests bug LUCENE-800
761 assertQueryEquals("(item:\\\\ item:ABCD\\\\)", a, "item:\\ item:ABCD\\");
762 assertParseException("(item:\\\\ item:ABCD\\\\))"); // unmatched closing paranthesis
763 assertQueryEquals("\\*", a, "*");
764 assertQueryEquals("\\\\", a, "\\"); // escaped backslash
766 assertParseException("\\"); // a backslash must always be escaped
769 assertQueryEquals("(\"a\\\\\") or (\"b\")", a ,"a\\ or b");
772 public void testQueryStringEscaping() throws Exception {
773 Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
775 assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
776 assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
777 assertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
778 assertEscapedQueryEquals("a\\b:c", a, "a\\\\b\\:c");
780 assertEscapedQueryEquals("a:b-c", a, "a\\:b\\-c");
781 assertEscapedQueryEquals("a:b+c", a, "a\\:b\\+c");
782 assertEscapedQueryEquals("a:b:c", a, "a\\:b\\:c");
783 assertEscapedQueryEquals("a:b\\c", a, "a\\:b\\\\c");
785 assertEscapedQueryEquals("a:b-c*", a, "a\\:b\\-c\\*");
786 assertEscapedQueryEquals("a:b+c*", a, "a\\:b\\+c\\*");
787 assertEscapedQueryEquals("a:b:c*", a, "a\\:b\\:c\\*");
789 assertEscapedQueryEquals("a:b\\\\c*", a, "a\\:b\\\\\\\\c\\*");
791 assertEscapedQueryEquals("a:b-?c", a, "a\\:b\\-\\?c");
792 assertEscapedQueryEquals("a:b+?c", a, "a\\:b\\+\\?c");
793 assertEscapedQueryEquals("a:b:?c", a, "a\\:b\\:\\?c");
795 assertEscapedQueryEquals("a:b?c", a, "a\\:b\\?c");
797 assertEscapedQueryEquals("a:b-c~", a, "a\\:b\\-c\\~");
798 assertEscapedQueryEquals("a:b+c~", a, "a\\:b\\+c\\~");
799 assertEscapedQueryEquals("a:b:c~", a, "a\\:b\\:c\\~");
800 assertEscapedQueryEquals("a:b\\c~", a, "a\\:b\\\\c\\~");
802 assertEscapedQueryEquals("[ a - TO a+ ]", null, "\\[ a \\- TO a\\+ \\]");
803 assertEscapedQueryEquals("[ a : TO a~ ]", null, "\\[ a \\: TO a\\~ \\]");
804 assertEscapedQueryEquals("[ a\\ TO a* ]", null, "\\[ a\\\\ TO a\\* \\]");
807 assertEscapedQueryEquals("|| abc ||", a, "\\|\\| abc \\|\\|");
808 assertEscapedQueryEquals("&& abc &&", a, "\\&\\& abc \\&\\&");
811 public void testTabNewlineCarriageReturn()
813 assertQueryEqualsDOA("+weltbank +worlbank", null,
814 "+weltbank +worlbank");
816 assertQueryEqualsDOA("+weltbank\n+worlbank", null,
817 "+weltbank +worlbank");
818 assertQueryEqualsDOA("weltbank \n+worlbank", null,
819 "+weltbank +worlbank");
820 assertQueryEqualsDOA("weltbank \n +worlbank", null,
821 "+weltbank +worlbank");
823 assertQueryEqualsDOA("+weltbank\r+worlbank", null,
824 "+weltbank +worlbank");
825 assertQueryEqualsDOA("weltbank \r+worlbank", null,
826 "+weltbank +worlbank");
827 assertQueryEqualsDOA("weltbank \r +worlbank", null,
828 "+weltbank +worlbank");
830 assertQueryEqualsDOA("+weltbank\r\n+worlbank", null,
831 "+weltbank +worlbank");
832 assertQueryEqualsDOA("weltbank \r\n+worlbank", null,
833 "+weltbank +worlbank");
834 assertQueryEqualsDOA("weltbank \r\n +worlbank", null,
835 "+weltbank +worlbank");
836 assertQueryEqualsDOA("weltbank \r \n +worlbank", null,
837 "+weltbank +worlbank");
839 assertQueryEqualsDOA("+weltbank\t+worlbank", null,
840 "+weltbank +worlbank");
841 assertQueryEqualsDOA("weltbank \t+worlbank", null,
842 "+weltbank +worlbank");
843 assertQueryEqualsDOA("weltbank \t +worlbank", null,
844 "+weltbank +worlbank");
847 public void testSimpleDAO()
849 assertQueryEqualsDOA("term term term", null, "+term +term +term");
850 assertQueryEqualsDOA("term +term term", null, "+term +term +term");
851 assertQueryEqualsDOA("term term +term", null, "+term +term +term");
852 assertQueryEqualsDOA("term +term +term", null, "+term +term +term");
853 assertQueryEqualsDOA("-term term term", null, "-term +term +term");
// Verifies boost handling on parsed queries: explicit boosts survive
// parsing, and a query reduced to nothing by stop-word removal keeps the
// default boost of 1.0.
// NOTE(review): several original lines are elided between the statements
// below (e.g. the stopWords.add(...) call that populates the one-element
// stop set, and assertNotNull checks) -- confirm against the full source.
856 public void testBoost()
858 Set<Object> stopWords = new HashSet<Object>(1);
// analyzer whose stop set is exactly the contents of stopWords
860 StandardAnalyzer oneStopAnalyzer = new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords);
861 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", oneStopAnalyzer);
// boosted single term parses without error
862 Query q = qp.parse("on^1.0");
864 q = qp.parse("\"hello\"^2.0");
// wide delta (0.5) -- only checks a boost was applied, not its exact value
866 assertEquals(q.getBoost(), (float) 2.0, (float) 0.5);
867 q = qp.parse("hello^2.0");
869 assertEquals(q.getBoost(), (float) 2.0, (float) 0.5);
870 q = qp.parse("\"on\"^1.0");
// second parser uses StandardAnalyzer's default stop set (includes "the")
873 QueryParser qp2 = new QueryParser(TEST_VERSION_CURRENT, "field", new StandardAnalyzer(TEST_VERSION_CURRENT));
874 q = qp2.parse("the^3");
875 // "the" is a stop word so the result is an empty query:
877 assertEquals("", q.toString());
// the explicit ^3 must NOT leak onto the empty query -- boost stays 1.0
878 assertEquals(1.0f, q.getBoost(), 0.01f);
881 public void assertParseException(String queryString) throws Exception {
883 getQuery(queryString, null);
884 } catch (ParseException expected) {
887 fail("ParseException expected, not thrown");
890 public void testException() throws Exception {
891 assertParseException("\"some phrase");
892 assertParseException("(foo bar");
893 assertParseException("foo bar))");
894 assertParseException("field:term:with:colon some more terms");
895 assertParseException("(sub query)^5.0^2.0 plus more");
896 assertParseException("secret AND illegal) AND access:confidential");
900 public void testCustomQueryParserWildcard() {
902 new QPTestParser("contents", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("a?t");
903 fail("Wildcard queries should not be allowed");
904 } catch (ParseException expected) {
905 // expected exception
909 public void testCustomQueryParserFuzzy() throws Exception {
911 new QPTestParser("contents", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("xunit~");
912 fail("Fuzzy queries should not be allowed");
913 } catch (ParseException expected) {
914 // expected exception
918 public void testBooleanQuery() throws Exception {
919 BooleanQuery.setMaxClauseCount(2);
921 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
922 qp.parse("one two three");
923 fail("ParseException expected due to too many boolean clauses");
924 } catch (ParseException expected) {
925 // too many boolean clauses, so ParseException is expected
930 * This test differs from TestPrecedenceQueryParser
932 public void testPrecedence() throws Exception {
933 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
934 Query query1 = qp.parse("A AND B OR C AND D");
935 Query query2 = qp.parse("+A +B +C +D");
936 assertEquals(query1, query2);
// Indexes two dated documents and checks that locale-formatted date
// ranges -- inclusive [..] and exclusive {..} -- hit the expected number
// of documents.
// NOTE(review): the lines closing iw/is/ramDir appear to be elided after
// the assertions -- confirm resources are released in the full source.
939 public void testLocalDateFormat() throws IOException, ParseException {
940 Directory ramDir = newDirectory();
941 IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
// doc "a" dated Dec 2 2005 10:15:33, doc "b" dated Dec 4 2005 22:15:00
943 addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
944 addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
946 IndexSearcher is = new IndexSearcher(ramDir, true);
// inclusive ranges
947 assertHits(1, "[12/1/2005 TO 12/3/2005]", is);
948 assertHits(2, "[12/1/2005 TO 12/4/2005]", is);
949 assertHits(1, "[12/3/2005 TO 12/4/2005]", is);
// exclusive ranges: the endpoint days themselves do not match
950 assertHits(1, "{12/1/2005 TO 12/3/2005}", is);
951 assertHits(1, "{12/1/2005 TO 12/4/2005}", is);
952 assertHits(0, "{12/3/2005 TO 12/4/2005}", is);
// Exercises '*' handling in the parser via an anonymous subclass that
// records which factory method was invoked in type[0] and returns a plain
// TermQuery instead of real wildcard/prefix queries.
// NOTE(review): several original lines are elided here (the type[0]=N
// assignments inside the overrides, the "TermQuery tq;" declaration, and
// closing braces). From the assertions below the codes appear to be
// 1 = wildcard, 2 = prefix, 3 = field -- confirm against the full source.
957 public void testStarParsing() throws Exception {
// single-element array so the anonymous inner class can write to it
958 final int[] type = new int[1];
959 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)) {
961 protected Query getWildcardQuery(String field, String termStr) throws ParseException {
962 // override error checking of superclass
964 return new TermQuery(new Term(field,termStr));
967 protected Query getPrefixQuery(String field, String termStr) throws ParseException {
968 // override error checking of superclass
970 return new TermQuery(new Term(field,termStr));
974 protected Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException {
976 return super.getFieldQuery(field, queryText, quoted);
// trailing "*" -> prefix path; the term text keeps only the stem
982 tq = (TermQuery)qp.parse("foo:zoo*");
983 assertEquals("zoo",tq.getTerm().text());
984 assertEquals(2,type[0]);
// a boost after the star must not disturb prefix handling
986 tq = (TermQuery)qp.parse("foo:zoo*^2");
987 assertEquals("zoo",tq.getTerm().text());
988 assertEquals(2,type[0]);
989 assertEquals(tq.getBoost(),2,0);
// bare "*" as the term is routed through the wildcard path
991 tq = (TermQuery)qp.parse("foo:*");
992 assertEquals("*",tq.getTerm().text());
993 assertEquals(1,type[0]); // could be a valid prefix query in the future too
995 tq = (TermQuery)qp.parse("foo:*^2");
996 assertEquals("*",tq.getTerm().text());
997 assertEquals(1,type[0]);
998 assertEquals(tq.getBoost(),2,0);
// "*" as the FIELD name goes through getFieldQuery
1000 tq = (TermQuery)qp.parse("*:foo");
1001 assertEquals("*",tq.getTerm().field());
1002 assertEquals("foo",tq.getTerm().text());
1003 assertEquals(3,type[0]);
// "*:*" stays a wildcard term query under this subclass
1005 tq = (TermQuery)qp.parse("*:*");
1006 assertEquals("*",tq.getTerm().field());
1007 assertEquals("*",tq.getTerm().text());
1008 assertEquals(1,type[0]); // could be handled as a prefix query in the future
// parenthesized form behaves identically
1010 tq = (TermQuery)qp.parse("(*:*)");
1011 assertEquals("*",tq.getTerm().field());
1012 assertEquals("*",tq.getTerm().text());
1013 assertEquals(1,type[0]);
1017 public void testStopwords() throws Exception {
1018 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "a", new StopAnalyzer(TEST_VERSION_CURRENT, StopFilter.makeStopSet(TEST_VERSION_CURRENT, "the", "foo")));
1019 Query result = qp.parse("a:the OR a:foo");
1020 assertNotNull("result is null and it shouldn't be", result);
1021 assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
1022 assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 0, ((BooleanQuery) result).clauses().size() == 0);
1023 result = qp.parse("a:woo OR a:the");
1024 assertNotNull("result is null and it shouldn't be", result);
1025 assertTrue("result is not a TermQuery", result instanceof TermQuery);
1026 result = qp.parse("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)");
1027 assertNotNull("result is null and it shouldn't be", result);
1028 assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
1029 if (VERBOSE) System.out.println("Result: " + result);
1030 assertTrue(((BooleanQuery) result).clauses().size() + " does not equal: " + 2, ((BooleanQuery) result).clauses().size() == 2);
1033 public void testPositionIncrement() throws Exception {
1034 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "a", new StopAnalyzer(TEST_VERSION_CURRENT, StopFilter.makeStopSet(TEST_VERSION_CURRENT, "the", "in", "are", "this")));
1035 qp.setEnablePositionIncrements(true);
1036 String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
1038 int expectedPositions[] = {1,3,4,6,9};
1039 PhraseQuery pq = (PhraseQuery) qp.parse(qtxt);
1040 //System.out.println("Query text: "+qtxt);
1041 //System.out.println("Result: "+pq);
1042 Term t[] = pq.getTerms();
1043 int pos[] = pq.getPositions();
1044 for (int i = 0; i < t.length; i++) {
1045 //System.out.println(i+". "+t[i]+" pos: "+pos[i]);
1046 assertEquals("term "+i+" = "+t[i]+" has wrong term-position!",expectedPositions[i],pos[i]);
1050 public void testMatchAllDocs() throws Exception {
1051 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
1052 assertEquals(new MatchAllDocsQuery(), qp.parse("*:*"));
1053 assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)"));
1054 BooleanQuery bq = (BooleanQuery)qp.parse("+*:* -*:*");
1055 assertTrue(bq.getClauses()[0].getQuery() instanceof MatchAllDocsQuery);
1056 assertTrue(bq.getClauses()[1].getQuery() instanceof MatchAllDocsQuery);
1059 private void assertHits(int expected, String query, IndexSearcher is) throws ParseException, IOException {
1060 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "date", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
1061 qp.setLocale(Locale.ENGLISH);
1062 Query q = qp.parse(query);
1063 ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
1064 assertEquals(expected, hits.length);
1067 private void addDateDoc(String content, int year, int month,
1068 int day, int hour, int minute, int second, IndexWriter iw) throws IOException {
1069 Document d = new Document();
1070 d.add(newField("f", content, Field.Store.YES, Field.Index.ANALYZED));
1071 Calendar cal = Calendar.getInstance(Locale.ENGLISH);
1072 cal.set(year, month-1, day, hour, minute, second);
1073 d.add(newField("date", DateField.dateToString(cal.getTime()), Field.Store.YES, Field.Index.NOT_ANALYZED));
1078 public void tearDown() throws Exception {
1079 BooleanQuery.setMaxClauseCount(originalMaxClauses);
1083 // LUCENE-2002: make sure defaults for StandardAnalyzer's
1084 // enableStopPositionIncr & QueryParser's enablePosIncr
// LUCENE-2002: StandardAnalyzer's stop-position-increment default and
// QueryParser's enablePositionIncrements default must cooperate so a
// phrase spanning a removed stop word ("the") still matches.
// NOTE(review): the lines adding the document to the writer and closing
// w/s/r/dir appear to be elided here -- confirm against the full source.
1086 public void testPositionIncrements() throws Exception {
1087 Directory dir = newDirectory();
1088 Analyzer a = new StandardAnalyzer(TEST_VERSION_CURRENT);
1089 IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, a));
1090 Document doc = new Document();
// "the" is presumably in StandardAnalyzer's default stop set -- the point
// of the test is that its removal still leaves the phrase matchable
1091 doc.add(newField("f", "the wizard of ozzy", Field.Store.NO, Field.Index.ANALYZED));
// reader opened straight from the writer (near-real-time)
1093 IndexReader r = IndexReader.open(w, true);
1095 IndexSearcher s = newSearcher(r);
1096 QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "f", a);
1097 Query q = qp.parse("\"wizard of ozzy\"");
1098 assertEquals(1, s.search(q, 1).totalHits);
1104 // LUCENE-2002: when we run javacc to regen QueryParser,
1105 // we also run a replaceregexp step to fix 2 of the public
1106 // ctors (change them to protected):
1108 // protected QueryParser(CharStream stream)
1110 // protected QueryParser(QueryParserTokenManager tm)
1112 // This test is here as a safety, in case that ant step
1113 // doesn't work for some reason.
1114 public void testProtectedCtors() throws Exception {
1116 QueryParser.class.getConstructor(new Class[] {CharStream.class});
1117 fail("please switch public QueryParser(CharStream) to be protected");
1118 } catch (NoSuchMethodException nsme) {
1122 QueryParser.class.getConstructor(new Class[] {QueryParserTokenManager.class});
1123 fail("please switch public QueryParser(QueryParserTokenManager) to be protected");
1124 } catch (NoSuchMethodException nsme) {