diff --git a/lucene-java-3.4.0/lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java b/lucene-java-3.4.0/lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java
deleted file mode 100644
index 857b3fb..0000000
--- a/lucene-java-3.4.0/lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java
+++ /dev/null
@@ -1,88 +0,0 @@
-package org.apache.lucene.index;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-import java.util.HashMap;
-
-
-/**
- * This is a DocConsumer that gathers all fields under the
- * same name, and calls per-field consumers to process field
- * by field.  This class doesn't do any "real" work
- * of its own: it just forwards the fields to a
- * DocFieldConsumer.
- */
-
-final class DocFieldProcessor extends DocConsumer {
-
-  final DocumentsWriter docWriter;
-  final FieldInfos fieldInfos;
-  final DocFieldConsumer consumer;
-  final StoredFieldsWriter fieldsWriter;
-
-  public DocFieldProcessor(DocumentsWriter docWriter, DocFieldConsumer consumer) {
-    this.docWriter = docWriter;
-    this.consumer = consumer;
-    fieldInfos = docWriter.getFieldInfos();
-    consumer.setFieldInfos(fieldInfos);
-    fieldsWriter = new StoredFieldsWriter(docWriter, fieldInfos);
-  }
-
-  @Override
-  public void flush(Collection<DocConsumerPerThread> threads, SegmentWriteState state) throws IOException {
-
-    Map<DocFieldConsumerPerThread, Collection<DocFieldConsumerPerField>> childThreadsAndFields = new HashMap<DocFieldConsumerPerThread, Collection<DocFieldConsumerPerField>>();
-    for (DocConsumerPerThread thread : threads) {
-      DocFieldProcessorPerThread perThread = (DocFieldProcessorPerThread) thread;
-      childThreadsAndFields.put(perThread.consumer, perThread.fields());
-      perThread.trimFields(state);
-    }
-
-    fieldsWriter.flush(state);
-    consumer.flush(childThreadsAndFields, state);
-
-    // Important to save after asking consumer to flush so
-    // consumer can alter the FieldInfo* if necessary.  EG,
-    // FreqProxTermsWriter does this with
-    // FieldInfo.storePayload.
-    final String fileName = IndexFileNames.segmentFileName(state.segmentName, IndexFileNames.FIELD_INFOS_EXTENSION);
-    fieldInfos.write(state.directory, fileName);
-  }
-
-  @Override
-  public void abort() {
-    try {
-      fieldsWriter.abort();
-    } finally {
-      consumer.abort();
-    }
-  }
-
-  @Override
-  public boolean freeRAM() {
-    return consumer.freeRAM();
-  }
-
-  @Override
-  public DocConsumerPerThread addThread(DocumentsWriterThreadState threadState) throws IOException {
-    return new DocFieldProcessorPerThread(threadState, this);
-  }
-}
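
For context, the deleted class is only a dispatcher: it groups a document's fields by name and hands each group to a downstream DocFieldConsumer, while StoredFieldsWriter takes care of the stored fields. Below is a minimal, self-contained sketch of that group-and-forward pattern. The names FieldFanOutSketch, PerFieldConsumer, and Field are hypothetical stand-ins chosen for illustration; they are not part of Lucene's API.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public final class FieldFanOutSketch {

  /** Hypothetical stand-in for the role DocFieldConsumer plays above. */
  interface PerFieldConsumer {
    void consume(String fieldName, List<String> values);
  }

  /** One (name, value) pair, standing in for a document field. */
  static final class Field {
    final String name;
    final String value;
    Field(String name, String value) {
      this.name = name;
      this.value = value;
    }
  }

  /**
   * Gathers all fields that share a name and forwards each group to the
   * per-field consumer; like the deleted class, it does no "real" work of
   * its own.
   */
  static void process(List<Field> docFields, PerFieldConsumer consumer) {
    Map<String, List<String>> byName = new LinkedHashMap<String, List<String>>();
    for (Field f : docFields) {
      List<String> values = byName.get(f.name);
      if (values == null) {
        values = new ArrayList<String>();
        byName.put(f.name, values);
      }
      values.add(f.value);
    }
    for (Map.Entry<String, List<String>> e : byName.entrySet()) {
      consumer.consume(e.getKey(), e.getValue());
    }
  }

  public static void main(String[] args) {
    List<Field> doc = new ArrayList<Field>();
    doc.add(new Field("title", "Lucene in Action"));
    doc.add(new Field("author", "Gospodnetic"));
    doc.add(new Field("author", "Hatcher"));
    process(doc, new PerFieldConsumer() {
      public void consume(String fieldName, List<String> values) {
        System.out.println(fieldName + " -> " + values);
      }
    });
  }
}

In the real class the per-field state handed downstream is richer than plain strings, and the grouping also feeds FieldInfos, but the forwarding structure is exactly what the class comment in the diff describes.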