Projects >> lucene-solr >> 88d2e854074f25a3fd54f66998271bcaeca7a232

Chunk
Conflicting content
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
<<<<<<< HEAD
import org.apache.lucene.index.codecs.FieldsProducer;
import org.apache.lucene.index.codecs.PerDocValues;
=======
>>>>>>> 00c35bfec344874e67eef72aeef0db224489d07d
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
Solution content
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.index.codecs.PerDocValues;
import org.apache.lucene.index.values.DocValues;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
File
SegmentReader.java
Developer's decision
Manual
Kind of conflict
Import
Chunk
Conflicting content
  IndexInput singleNormStream;
  AtomicInteger singleNormRef;

<<<<<<< HEAD
  CoreReaders core;

  // Holds core readers that are shared (unchanged) when
  // SegmentReader is cloned or reopened
  static final class CoreReaders {

    // Counts how many other reader share the core objects
    // (freqStream, proxStream, tis, etc.) of this reader;
    // when coreRef drops to 0, these core objects may be
    // closed.  A given instance of SegmentReader may be
    // closed, even those it shares core objects with other
    // SegmentReaders:
    private final AtomicInteger ref = new AtomicInteger(1);

    final String segment;
    final FieldInfos fieldInfos;

    final FieldsProducer fields;
    final PerDocValues perDocProducer;
    
    final Directory dir;
    final Directory cfsDir;
    final int readBufferSize;
    final int termsIndexDivisor;

    private final SegmentReader origInstance;

    FieldsReader fieldsReaderOrig;
    TermVectorsReader termVectorsReaderOrig;
    CompoundFileReader cfsReader;
    CompoundFileReader storeCFSReader;

    CoreReaders(SegmentReader origInstance, Directory dir, SegmentInfo si, int readBufferSize, int termsIndexDivisor) throws IOException {

      if (termsIndexDivisor == 0) {
        throw new IllegalArgumentException("indexDivisor must be < 0 (don't load terms index) or greater than 0 (got 0)");
      }

      segment = si.name;
      final SegmentCodecs segmentCodecs = si.getSegmentCodecs();
      this.readBufferSize = readBufferSize;
      this.dir = dir;

      boolean success = false;

      try {
        Directory dir0 = dir;
        if (si.getUseCompoundFile()) {
          cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
          dir0 = cfsReader;
        }
        cfsDir = dir0;
        si.loadFieldInfos(cfsDir, false); // prevent opening the CFS to load fieldInfos
        fieldInfos = si.getFieldInfos();
        
        this.termsIndexDivisor = termsIndexDivisor;
        
        // Ask codec for its Fields
        final SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si, fieldInfos, readBufferSize, termsIndexDivisor);
        fields = segmentCodecs.codec().fieldsProducer(segmentReadState);
        assert fields != null;
        perDocProducer = segmentCodecs.codec().docsProducer(segmentReadState);
        success = true;
      } finally {
        if (!success) {
          decRef();
        }
      }

      // Must assign this at the end -- if we hit an
      // exception above core, we don't want to attempt to
      // purge the FieldCache (will hit NPE because core is
      // not assigned yet).
      this.origInstance = origInstance;
    }
    
    

    synchronized TermVectorsReader getTermVectorsReaderOrig() {
      return termVectorsReaderOrig;
    }

    synchronized FieldsReader getFieldsReaderOrig() {
      return fieldsReaderOrig;
    }

    synchronized void incRef() {
      ref.incrementAndGet();
    }

    synchronized Directory getCFSReader() {
      return cfsReader;
    }

    synchronized void decRef() throws IOException {
      if (ref.decrementAndGet() == 0) {
        if (fields != null) {
          fields.close();
        }
        
        if (perDocProducer != null) {
          perDocProducer.close();
        }

        if (termVectorsReaderOrig != null) {
          termVectorsReaderOrig.close();
        }
  
        if (fieldsReaderOrig != null) {
          fieldsReaderOrig.close();
        }
  
        if (cfsReader != null) {
          cfsReader.close();
        }
  
        if (storeCFSReader != null) {
          storeCFSReader.close();
        }

        // Now, notify any ReaderFinished listeners:
        if (origInstance != null) {
          origInstance.notifyReaderFinishedListeners();
        }
      }
    }

    synchronized void openDocStores(SegmentInfo si) throws IOException {

      assert si.name.equals(segment);

      if (fieldsReaderOrig == null) {
        final Directory storeDir;
        if (si.getDocStoreOffset() != -1) {
          if (si.getDocStoreIsCompoundFile()) {
            assert storeCFSReader == null;
            storeCFSReader = new CompoundFileReader(dir,
                IndexFileNames.segmentFileName(si.getDocStoreSegment(), "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION),
                                                    readBufferSize);
            storeDir = storeCFSReader;
            assert storeDir != null;
          } else {
            storeDir = dir;
            assert storeDir != null;
          }
        } else if (si.getUseCompoundFile()) {
          // In some cases, we were originally opened when CFS
          // was not used, but then we are asked to open doc
          // stores after the segment has switched to CFS
          if (cfsReader == null) {
            cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
          }
          storeDir = cfsReader;
          assert storeDir != null;
        } else {
          storeDir = dir;
          assert storeDir != null;
        }

        final String storesSegment = si.getDocStoreSegment();
        fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize,
                                            si.getDocStoreOffset(), si.docCount);

        // Verify two sources of "maxDoc" agree:
        if (si.getDocStoreOffset() == -1 && fieldsReaderOrig.size() != si.docCount) {
          throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount);
        }

        if (si.getHasVectors()) { // open term vector files only as needed
          termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
        }
      }
    }
  }
=======
  SegmentCoreReaders core;
>>>>>>> 00c35bfec344874e67eef72aeef0db224489d07d

  /**
   * Sets the initial value 
Solution content
  IndexInput singleNormStream;
  AtomicInteger singleNormRef;

  SegmentCoreReaders core;

  /**
   * Sets the initial value 
File
SegmentReader.java
Developer's decision
Version 2
Kind of conflict
Attribute
Class declaration
Comment
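
Both sides of this chunk share the same underlying design: the heavyweight per-segment resources (fields reader, term vectors reader, compound-file readers, and the codec's FieldsProducer/PerDocValues) are shared by cloned or reopened SegmentReaders and are only closed when the last reader referencing them is released. HEAD keeps that logic in the inner CoreReaders class shown above, while version 2 moves it into the separate SegmentCoreReaders class. The sketch below is a minimal, hypothetical illustration of that reference-counting pattern only; SharedCore and its members are made-up names, not part of the Lucene API.

import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;

// Illustrative only: mirrors the incRef()/decRef() pattern used by
// CoreReaders / SegmentCoreReaders, not actual Lucene code.
final class SharedCore {
  // Starts at 1 for the reader that created the core; cloned or
  // reopened readers call incRef() so the shared resources outlive
  // any single reader instance.
  private final AtomicInteger ref = new AtomicInteger(1);
  private final Closeable[] resources;

  SharedCore(Closeable... resources) {
    this.resources = resources;
  }

  void incRef() {
    ref.incrementAndGet();
  }

  // When the last reader releases the core, close the shared resources.
  void decRef() throws IOException {
    if (ref.decrementAndGet() == 0) {
      for (Closeable c : resources) {
        if (c != null) {
          c.close();
        }
      }
    }
  }
}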