1 package org.apache.lucene.index;
4 * Licensed to the Apache Software Foundation (ASF) under one or more
5 * contributor license agreements. See the NOTICE file distributed with
6 * this work for additional information regarding copyright ownership.
7 * The ASF licenses this file to You under the Apache License, Version 2.0
8 * (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
20 import java.io.IOException;
21 import java.util.concurrent.atomic.AtomicInteger;
23 import org.apache.lucene.store.Directory;
24 import org.apache.lucene.store.IndexInput;
25 import org.apache.lucene.util.IOUtils;
27 /** Holds core readers that are shared (unchanged) when
28 * SegmentReader is cloned or reopened */
final class SegmentCoreReaders {

  // Counts how many other readers share the core objects
  // (freqStream, proxStream, tis, etc.) of this reader;
  // when ref drops to 0, these core objects may be
  // closed. A given instance of SegmentReader may be
  // closed, even though it shares core objects with other
  // SegmentReaders.
  private final AtomicInteger ref = new AtomicInteger(1);

  // Per-segment core state, opened once in the constructor and shared
  // (unchanged) across all clones/reopens of the owning SegmentReader.
  final FieldInfos fieldInfos;       // field metadata (names/flags) for this segment
  final IndexInput freqStream;       // postings (doc/freq) stream
  final IndexInput proxStream;       // positions stream; only opened when fieldInfos.hasProx()
  final TermInfosReader tisNoIndex;  // terms reader opened without the in-memory terms index
                                     // (presumably the termsIndexDivisor == -1 path -- ctor body elided, TODO confirm)

  final Directory cfsDir;            // directory the core files are read from (the CFS reader when compound file is used)
  final int readBufferSize;          // buffer size passed to every openInput() on this core
  final int termsIndexDivisor;       // terms-index sampling divisor; -1 appears to mean "don't load the terms index"

  private final SegmentReader owner; // reader that created this core; notified when the last ref is released (see decRef)

  // Lazily opened, shared "orig" readers and compound-file readers;
  // access is synchronized on this instance.
  FieldsReader fieldsReaderOrig;            // opened by openDocStores()
  TermVectorsReader termVectorsReaderOrig;  // opened by openDocStores() when si.getHasVectors()
  CompoundFileReader cfsReader;             // CFS for core files; may also be opened lazily (loadTermsIndex/openDocStores)
  CompoundFileReader storeCFSReader;        // CFS for shared doc stores; opened by openDocStores()
  /**
   * Opens the shared core files (field infos, terms dictionary, postings
   * streams) for the given segment.
   *
   * NOTE(review): several lines of this constructor are elided in this
   * excerpt -- the try/finally that uses {@code success}, the non-CFS
   * branch, the assignment of {@code segment}, and the final field
   * assignments. Comments below describe only what is visible.
   */
  SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentInfo si, int readBufferSize, int termsIndexDivisor) throws IOException {

    this.readBufferSize = readBufferSize;

    // Presumably consulted by an (elided) finally block so partially
    // opened files are closed if init fails -- TODO confirm.
    boolean success = false;

      // When the segment is packed into a compound file, core files are
      // opened through a CompoundFileReader instead of dir directly.
      if (si.getUseCompoundFile()) {
        cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);

      fieldInfos = new FieldInfos(cfsDir, IndexFileNames.segmentFileName(segment, IndexFileNames.FIELD_INFOS_EXTENSION));

      this.termsIndexDivisor = termsIndexDivisor;
      TermInfosReader reader = new TermInfosReader(cfsDir, segment, fieldInfos, readBufferSize, termsIndexDivisor);
      // -1 means "do not load the terms index"; the branch body is elided
      // but presumably routes reader into tisNoIndex -- TODO confirm.
      if (termsIndexDivisor == -1) {

        // make sure that all index files have been read or are kept open
        // so that if an index update removes them we'll still have them
        freqStream = cfsDir.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.FREQ_EXTENSION), readBufferSize);

        // Positions stream exists only when at least one field stores prox data.
        if (fieldInfos.hasProx()) {
          proxStream = cfsDir.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.PROX_EXTENSION), readBufferSize);

      // Must assign this at the end -- if we hit an
      // exception above core, we don't want to attempt to
      // purge the FieldCache (will hit NPE because core is
      // not assigned yet).
  /**
   * Returns the shared TermVectorsReader template; null until
   * openDocStores() has run for a segment that has vectors.
   */
  synchronized TermVectorsReader getTermVectorsReaderOrig() {
    return termVectorsReaderOrig;
  /**
   * Returns the shared stored-fields reader template; null until
   * openDocStores() has run.
   */
  synchronized FieldsReader getFieldsReaderOrig() {
    return fieldsReaderOrig;
  /** Adds one reference to this shared core; pair every call with decRef(). */
  synchronized void incRef() {
    ref.incrementAndGet();
  // NOTE(review): body elided in this excerpt -- presumably returns the
  // directory core files are read from (cfsDir/cfsReader); TODO confirm.
  synchronized Directory getCFSReader() {
  // NOTE(review): body elided in this excerpt -- presumably returns the
  // active TermInfosReader (tis when the index is loaded, else tisNoIndex);
  // TODO confirm against the full source.
  synchronized TermInfosReader getTermsReader() {
  // NOTE(review): body elided in this excerpt -- presumably reports whether
  // the in-memory terms index has been loaded (see loadTermsIndex); TODO confirm.
  synchronized boolean termsIndexIsLoaded() {
  // NOTE: only called from IndexWriter when a near
  // real-time reader is opened, or applyDeletes is run,
  // sharing a segment that's still being merged. This
  // method is not fully thread safe, and relies on the
  // synchronization in IndexWriter
  //
  // NOTE(review): parts of this method are elided in this excerpt -- the
  // guard that skips reloading, the declarations of dir0 and tis, and the
  // closing braces. Comments below describe only what is visible.
  synchronized void loadTermsIndex(SegmentInfo si, int termsIndexDivisor) throws IOException {

    if (si.getUseCompoundFile()) {
      // In some cases, we were originally opened when CFS
      // was not used, but then we are asked to open the
      // terms reader with index, the segment has switched
      // to CFS
      if (cfsReader == null) {
        cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);

      // Re-open the terms dictionary, this time with the requested
      // terms-index divisor so the in-memory index is loaded.
      tis = new TermInfosReader(dir0, segment, fieldInfos, readBufferSize, termsIndexDivisor);
  /**
   * Releases one reference; when the count reaches zero, closes every
   * shared core file/reader and notifies the owner's ReaderFinished
   * listeners.
   */
  synchronized void decRef() throws IOException {

    if (ref.decrementAndGet() == 0) {

      // Close everything in one shot. Several of these (tis, the lazily
      // opened orig readers, the CFS readers) may never have been opened --
      // presumably IOUtils.close tolerates nulls; TODO confirm.
      IOUtils.close(tis, tisNoIndex, freqStream, proxStream, termVectorsReaderOrig,
                    fieldsReaderOrig, cfsReader, storeCFSReader);

      // Now, notify any ReaderFinished listeners:
      owner.notifyReaderFinishedListeners();
  /**
   * Opens the shared doc-store readers (stored fields, plus term vectors
   * when the segment has them) the first time this is called; once
   * fieldsReaderOrig is set, later calls do nothing.
   *
   * NOTE(review): several else-branches and closing braces are elided in
   * this excerpt; the dangling asserts below mark those spots.
   */
  synchronized void openDocStores(SegmentInfo si) throws IOException {

    assert si.name.equals(segment);

    if (fieldsReaderOrig == null) {
      final Directory storeDir;
      if (si.getDocStoreOffset() != -1) {
        // Doc stores are shared with other segments (offset != -1).
        if (si.getDocStoreIsCompoundFile()) {
          assert storeCFSReader == null;
          storeCFSReader = new CompoundFileReader(dir,
                                                  IndexFileNames.segmentFileName(si.getDocStoreSegment(), IndexFileNames.COMPOUND_FILE_STORE_EXTENSION),
          storeDir = storeCFSReader;
          assert storeDir != null;
        assert storeDir != null;
      } else if (si.getUseCompoundFile()) {
        // In some cases, we were originally opened when CFS
        // was not used, but then we are asked to open doc
        // stores after the segment has switched to CFS
        if (cfsReader == null) {
          cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
        storeDir = cfsReader;
        assert storeDir != null;
      assert storeDir != null;

      // Shared doc stores are named after the doc-store segment; private
      // doc stores use this segment's own name.
      final String storesSegment;
      if (si.getDocStoreOffset() != -1) {
        storesSegment = si.getDocStoreSegment();
        storesSegment = segment;  // else-branch assignment ("else" line elided in this excerpt)

      fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize,
                                          si.getDocStoreOffset(), si.docCount);

      // Verify two sources of "maxDoc" agree:
      if (si.getDocStoreOffset() == -1 && fieldsReaderOrig.size() != si.docCount) {
        throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount);

      if (si.getHasVectors()) { // open term vector files only as needed
        termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
  /** For debugging: identifies this core by its owning SegmentReader. */
  public String toString() {
    return "SegmentCoreReader(owner=" + owner + ")";